{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.46501706484641636,
            "acc_stderr": 0.014575583922019667,
            "acc_norm": 0.5273037542662116,
            "acc_norm_stderr": 0.014589589101985994
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4630551682931687,
            "acc_stderr": 0.00497614145773688,
            "acc_norm": 0.6409081856203943,
            "acc_norm_stderr": 0.004787537385153014
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.7134502923976608,
            "acc_stderr": 0.03467826685703826,
            "acc_norm": 0.7134502923976608,
            "acc_norm_stderr": 0.03467826685703826
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6893203883495146,
            "acc_stderr": 0.04582124160161552,
            "acc_norm": 0.6893203883495146,
            "acc_norm_stderr": 0.04582124160161552
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.7203065134099617,
            "acc_stderr": 0.016050792148036563,
            "acc_norm": 0.7203065134099617,
            "acc_norm_stderr": 0.016050792148036563
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.042925967182569816,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.042925967182569816
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.5361702127659574,
            "acc_stderr": 0.03260038511835771,
            "acc_norm": 0.5361702127659574,
            "acc_norm_stderr": 0.03260038511835771
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.5180722891566265,
            "acc_stderr": 0.03889951252827216,
            "acc_norm": 0.5180722891566265,
            "acc_norm_stderr": 0.03889951252827216
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.6045016077170418,
            "acc_stderr": 0.027770918531427838,
            "acc_norm": 0.6045016077170418,
            "acc_norm_stderr": 0.027770918531427838
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5919282511210763,
            "acc_stderr": 0.03298574607842822,
            "acc_norm": 0.5919282511210763,
            "acc_norm_stderr": 0.03298574607842822
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.6335877862595419,
            "acc_stderr": 0.04225875451969639,
            "acc_norm": 0.6335877862595419,
            "acc_norm_stderr": 0.04225875451969639
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.45,
            "acc_stderr": 0.04999999999999999,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.04999999999999999
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.7575757575757576,
            "acc_stderr": 0.030532892233932046,
            "acc_norm": 0.7575757575757576,
            "acc_norm_stderr": 0.030532892233932046
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5310344827586206,
            "acc_stderr": 0.04158632762097828,
            "acc_norm": 0.5310344827586206,
            "acc_norm_stderr": 0.04158632762097828
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3431372549019608,
            "acc_stderr": 0.04724007352383887,
            "acc_norm": 0.3431372549019608,
            "acc_norm_stderr": 0.04724007352383887
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.6554621848739496,
            "acc_stderr": 0.030868682604121622,
            "acc_norm": 0.6554621848739496,
            "acc_norm_stderr": 0.030868682604121622
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5615384615384615,
            "acc_stderr": 0.025158266016868613,
            "acc_norm": 0.5615384615384615,
            "acc_norm_stderr": 0.025158266016868613
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.6,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6296296296296297,
            "acc_stderr": 0.04668408033024931,
            "acc_norm": 0.6296296296296297,
            "acc_norm_stderr": 0.04668408033024931
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.43842364532019706,
            "acc_stderr": 0.03491207857486519,
            "acc_norm": 0.43842364532019706,
            "acc_norm_stderr": 0.03491207857486519
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.6290322580645161,
            "acc_stderr": 0.027480541887953593,
            "acc_norm": 0.6290322580645161,
            "acc_norm_stderr": 0.027480541887953593
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7735042735042735,
            "acc_stderr": 0.027421007295392933,
            "acc_norm": 0.7735042735042735,
            "acc_norm_stderr": 0.027421007295392933
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5547169811320755,
            "acc_stderr": 0.030588052974270658,
            "acc_norm": 0.5547169811320755,
            "acc_norm_stderr": 0.030588052974270658
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.6090909090909091,
            "acc_stderr": 0.04673752333670239,
            "acc_norm": 0.6090909090909091,
            "acc_norm_stderr": 0.04673752333670239
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.35555555555555557,
            "acc_stderr": 0.029185714949857406,
            "acc_norm": 0.35555555555555557,
            "acc_norm_stderr": 0.029185714949857406
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3708609271523179,
            "acc_stderr": 0.03943966699183629,
            "acc_norm": 0.3708609271523179,
            "acc_norm_stderr": 0.03943966699183629
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.7313432835820896,
            "acc_stderr": 0.03134328358208954,
            "acc_norm": 0.7313432835820896,
            "acc_norm_stderr": 0.03134328358208954
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.5202312138728323,
            "acc_stderr": 0.03809342081273956,
            "acc_norm": 0.5202312138728323,
            "acc_norm_stderr": 0.03809342081273956
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.025107425481137285,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.025107425481137285
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5625,
            "acc_stderr": 0.04148415739394154,
            "acc_norm": 0.5625,
            "acc_norm_stderr": 0.04148415739394154
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.81,
            "acc_stderr": 0.03942772444036624,
            "acc_norm": 0.81,
            "acc_norm_stderr": 0.03942772444036624
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.6184971098265896,
            "acc_stderr": 0.026152198619726803,
            "acc_norm": 0.6184971098265896,
            "acc_norm_stderr": 0.026152198619726803
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5521472392638037,
            "acc_stderr": 0.03906947479456607,
            "acc_norm": 0.5521472392638037,
            "acc_norm_stderr": 0.03906947479456607
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.6388888888888888,
            "acc_stderr": 0.02672586880910079,
            "acc_norm": 0.6388888888888888,
            "acc_norm_stderr": 0.02672586880910079
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.7305699481865285,
            "acc_stderr": 0.03201867122877794,
            "acc_norm": 0.7305699481865285,
            "acc_norm_stderr": 0.03201867122877794
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.40350877192982454,
            "acc_stderr": 0.04615186962583703,
            "acc_norm": 0.40350877192982454,
            "acc_norm_stderr": 0.04615186962583703
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.7027522935779816,
            "acc_stderr": 0.019595707224643533,
            "acc_norm": 0.7027522935779816,
            "acc_norm_stderr": 0.019595707224643533
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.04360314860077459,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.04360314860077459
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.6503267973856209,
            "acc_stderr": 0.027305308076274695,
            "acc_norm": 0.6503267973856209,
            "acc_norm_stderr": 0.027305308076274695
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.59,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7024793388429752,
            "acc_stderr": 0.04173349148083499,
            "acc_norm": 0.7024793388429752,
            "acc_norm_stderr": 0.04173349148083499
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.6118421052631579,
            "acc_stderr": 0.03965842097512744,
            "acc_norm": 0.6118421052631579,
            "acc_norm_stderr": 0.03965842097512744
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.5081699346405228,
            "acc_stderr": 0.020225134343057272,
            "acc_norm": 0.5081699346405228,
            "acc_norm_stderr": 0.020225134343057272
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3900709219858156,
            "acc_stderr": 0.029097675599463926,
            "acc_norm": 0.3900709219858156,
            "acc_norm_stderr": 0.029097675599463926
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.375,
            "acc_stderr": 0.04595091388086298,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.04595091388086298
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.5231481481481481,
            "acc_stderr": 0.034063153607115065,
            "acc_norm": 0.5231481481481481,
            "acc_norm_stderr": 0.034063153607115065
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2022346368715084,
            "acc_stderr": 0.013433729483320993,
            "acc_norm": 0.2022346368715084,
            "acc_norm_stderr": 0.013433729483320993
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.68,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.68,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5551470588235294,
            "acc_stderr": 0.030187532060329383,
            "acc_norm": 0.5551470588235294,
            "acc_norm_stderr": 0.030187532060329383
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.6612244897959184,
            "acc_stderr": 0.030299506562154188,
            "acc_norm": 0.6612244897959184,
            "acc_norm_stderr": 0.030299506562154188
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.7637130801687764,
            "acc_stderr": 0.027652153144159263,
            "acc_norm": 0.7637130801687764,
            "acc_norm_stderr": 0.027652153144159263
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.39504563233376794,
            "acc_stderr": 0.012485727813251558,
            "acc_norm": 0.39504563233376794,
            "acc_norm_stderr": 0.012485727813251558
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.696078431372549,
            "acc_stderr": 0.03228210387037892,
            "acc_norm": 0.696078431372549,
            "acc_norm_stderr": 0.03228210387037892
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.7090909090909091,
            "acc_stderr": 0.03546563019624336,
            "acc_norm": 0.7090909090909091,
            "acc_norm_stderr": 0.03546563019624336
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2974296205630355,
            "mc1_stderr": 0.01600265148736101,
            "mc2": 0.43902374904102626,
            "mc2_stderr": 0.015135819154370348
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.6115702479338843,
            "acc_stderr": 0.01675692157106942,
            "acc_norm": 0.6375442739079102,
            "acc_norm_stderr": 0.0165271312404537
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Deepnoid/deep-solar-eeve-KorSTS",
        "model_sha": "63024622bcb7442d0d89e73930b5e57e675b22df",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}