{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.6416382252559727,
            "acc_stderr": 0.014012883334859864,
            "acc_norm": 0.6902730375426621,
            "acc_norm_stderr": 0.01351205841523836
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3943437562238598,
            "acc_stderr": 0.004877104939356235,
            "acc_norm": 0.5182234614618602,
            "acc_norm_stderr": 0.0049864661516987735
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5847953216374269,
            "acc_stderr": 0.03779275945503201,
            "acc_norm": 0.5847953216374269,
            "acc_norm_stderr": 0.03779275945503201
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6019417475728155,
            "acc_stderr": 0.048467482539772386,
            "acc_norm": 0.6019417475728155,
            "acc_norm_stderr": 0.048467482539772386
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6104725415070242,
            "acc_stderr": 0.0174380825562646,
            "acc_norm": 0.6104725415070242,
            "acc_norm_stderr": 0.0174380825562646
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3925925925925926,
            "acc_stderr": 0.042185062153688786,
            "acc_norm": 0.3925925925925926,
            "acc_norm_stderr": 0.042185062153688786
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4340425531914894,
            "acc_stderr": 0.032400380867927465,
            "acc_norm": 0.4340425531914894,
            "acc_norm_stderr": 0.032400380867927465
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.5180722891566265,
            "acc_stderr": 0.038899512528272166,
            "acc_norm": 0.5180722891566265,
            "acc_norm_stderr": 0.038899512528272166
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5884244372990354,
            "acc_stderr": 0.027950481494401266,
            "acc_norm": 0.5884244372990354,
            "acc_norm_stderr": 0.027950481494401266
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5829596412556054,
            "acc_stderr": 0.03309266936071721,
            "acc_norm": 0.5829596412556054,
            "acc_norm_stderr": 0.03309266936071721
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5190839694656488,
            "acc_stderr": 0.043820947055509867,
            "acc_norm": 0.5190839694656488,
            "acc_norm_stderr": 0.043820947055509867
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6363636363636364,
            "acc_stderr": 0.03427308652999935,
            "acc_norm": 0.6363636363636364,
            "acc_norm_stderr": 0.03427308652999935
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5241379310344828,
            "acc_stderr": 0.0416180850350153,
            "acc_norm": 0.5241379310344828,
            "acc_norm_stderr": 0.0416180850350153
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.30392156862745096,
            "acc_stderr": 0.045766654032077636,
            "acc_norm": 0.30392156862745096,
            "acc_norm_stderr": 0.045766654032077636
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5210084033613446,
            "acc_stderr": 0.032449808499900284,
            "acc_norm": 0.5210084033613446,
            "acc_norm_stderr": 0.032449808499900284
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5487179487179488,
            "acc_stderr": 0.02523038123893484,
            "acc_norm": 0.5487179487179488,
            "acc_norm_stderr": 0.02523038123893484
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.63,
            "acc_stderr": 0.04852365870939098,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.04852365870939098
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5277777777777778,
            "acc_stderr": 0.048262172941398944,
            "acc_norm": 0.5277777777777778,
            "acc_norm_stderr": 0.048262172941398944
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4482758620689655,
            "acc_stderr": 0.03499113137676744,
            "acc_norm": 0.4482758620689655,
            "acc_norm_stderr": 0.03499113137676744
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5741935483870968,
            "acc_stderr": 0.028129112709165904,
            "acc_norm": 0.5741935483870968,
            "acc_norm_stderr": 0.028129112709165904
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7649572649572649,
            "acc_stderr": 0.02777883590493543,
            "acc_norm": 0.7649572649572649,
            "acc_norm_stderr": 0.02777883590493543
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5169811320754717,
            "acc_stderr": 0.030755120364119898,
            "acc_norm": 0.5169811320754717,
            "acc_norm_stderr": 0.030755120364119898
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5454545454545454,
            "acc_stderr": 0.04769300568972744,
            "acc_norm": 0.5454545454545454,
            "acc_norm_stderr": 0.04769300568972744
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.32592592592592595,
            "acc_stderr": 0.02857834836547307,
            "acc_norm": 0.32592592592592595,
            "acc_norm_stderr": 0.02857834836547307
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.32450331125827814,
            "acc_stderr": 0.038227469376587525,
            "acc_norm": 0.32450331125827814,
            "acc_norm_stderr": 0.038227469376587525
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6567164179104478,
            "acc_stderr": 0.03357379665433431,
            "acc_norm": 0.6567164179104478,
            "acc_norm_stderr": 0.03357379665433431
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4797687861271676,
            "acc_stderr": 0.03809342081273957,
            "acc_norm": 0.4797687861271676,
            "acc_norm_stderr": 0.03809342081273957
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3783068783068783,
            "acc_stderr": 0.024976954053155257,
            "acc_norm": 0.3783068783068783,
            "acc_norm_stderr": 0.024976954053155257
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4375,
            "acc_stderr": 0.04148415739394154,
            "acc_norm": 0.4375,
            "acc_norm_stderr": 0.04148415739394154
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.71,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.71,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5289017341040463,
            "acc_stderr": 0.02687408588351835,
            "acc_norm": 0.5289017341040463,
            "acc_norm_stderr": 0.02687408588351835
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5705521472392638,
            "acc_stderr": 0.03889066619112723,
            "acc_norm": 0.5705521472392638,
            "acc_norm_stderr": 0.03889066619112723
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5679012345679012,
            "acc_stderr": 0.02756301097160668,
            "acc_norm": 0.5679012345679012,
            "acc_norm_stderr": 0.02756301097160668
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.6632124352331606,
            "acc_stderr": 0.03410780251836184,
            "acc_norm": 0.6632124352331606,
            "acc_norm_stderr": 0.03410780251836184
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.3157894736842105,
            "acc_stderr": 0.04372748290278007,
            "acc_norm": 0.3157894736842105,
            "acc_norm_stderr": 0.04372748290278007
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.689908256880734,
            "acc_stderr": 0.019830849684439756,
            "acc_norm": 0.689908256880734,
            "acc_norm_stderr": 0.019830849684439756
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3253968253968254,
            "acc_stderr": 0.041905964388711366,
            "acc_norm": 0.3253968253968254,
            "acc_norm_stderr": 0.041905964388711366
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5228758169934641,
            "acc_stderr": 0.028599936776089775,
            "acc_norm": 0.5228758169934641,
            "acc_norm_stderr": 0.028599936776089775
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6694214876033058,
            "acc_stderr": 0.04294340845212093,
            "acc_norm": 0.6694214876033058,
            "acc_norm_stderr": 0.04294340845212093
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5460526315789473,
            "acc_stderr": 0.04051646342874143,
            "acc_norm": 0.5460526315789473,
            "acc_norm_stderr": 0.04051646342874143
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4803921568627451,
            "acc_stderr": 0.020212274976302957,
            "acc_norm": 0.4803921568627451,
            "acc_norm_stderr": 0.020212274976302957
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.41134751773049644,
            "acc_stderr": 0.029354911159940985,
            "acc_norm": 0.41134751773049644,
            "acc_norm_stderr": 0.029354911159940985
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3482142857142857,
            "acc_stderr": 0.04521829902833585,
            "acc_norm": 0.3482142857142857,
            "acc_norm_stderr": 0.04521829902833585
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4166666666666667,
            "acc_stderr": 0.03362277436608043,
            "acc_norm": 0.4166666666666667,
            "acc_norm_stderr": 0.03362277436608043
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23798882681564246,
            "acc_stderr": 0.014242630070574892,
            "acc_norm": 0.23798882681564246,
            "acc_norm_stderr": 0.014242630070574892
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.57,
            "acc_stderr": 0.0497569851956243,
            "acc_norm": 0.57,
            "acc_norm_stderr": 0.0497569851956243
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.45588235294117646,
            "acc_stderr": 0.030254372573976687,
            "acc_norm": 0.45588235294117646,
            "acc_norm_stderr": 0.030254372573976687
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5877551020408164,
            "acc_stderr": 0.0315123604467427,
            "acc_norm": 0.5877551020408164,
            "acc_norm_stderr": 0.0315123604467427
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5907172995780591,
            "acc_stderr": 0.032007041833595914,
            "acc_norm": 0.5907172995780591,
            "acc_norm_stderr": 0.032007041833595914
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.4048239895697523,
            "acc_stderr": 0.012536743830953979,
            "acc_norm": 0.4048239895697523,
            "acc_norm_stderr": 0.012536743830953979
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5294117647058824,
            "acc_stderr": 0.03503235296367992,
            "acc_norm": 0.5294117647058824,
            "acc_norm_stderr": 0.03503235296367992
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.509090909090909,
            "acc_stderr": 0.0390369864774844,
            "acc_norm": 0.509090909090909,
            "acc_norm_stderr": 0.0390369864774844
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.5201958384332925,
            "mc1_stderr": 0.01748921684973705,
            "mc2": 0.6350127563212159,
            "mc2_stderr": 0.015049263970699864
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.43919716646989376,
            "acc_stderr": 0.0170627757447807,
            "acc_norm": 0.4817001180637544,
            "acc_norm_stderr": 0.017178836639177766
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Deepnoid/deep-solar-eeve-v2.0.0",
        "model_sha": "fc10ad6c60e72832c4181a386acb17c898e35407",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}