results/ENERGY-DRINK-LOVE/leaderboard_inst_v1.5_LDCC-SOLAR-10.7B_SFT/result_2024-03-04 07:17:52.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.44795221843003413,
            "acc_stderr": 0.014532011498211669,
            "acc_norm": 0.4931740614334471,
            "acc_norm_stderr": 0.014610029151379813
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4473212507468632,
            "acc_stderr": 0.004962010338226348,
            "acc_norm": 0.5994821748655647,
            "acc_norm_stderr": 0.004890019356021089
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6023391812865497,
            "acc_stderr": 0.03753638955761691,
            "acc_norm": 0.6023391812865497,
            "acc_norm_stderr": 0.03753638955761691
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6699029126213593,
            "acc_stderr": 0.046561471100123514,
            "acc_norm": 0.6699029126213593,
            "acc_norm_stderr": 0.046561471100123514
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6628352490421456,
            "acc_stderr": 0.016905207420803547,
            "acc_norm": 0.6628352490421456,
            "acc_norm_stderr": 0.016905207420803547
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.43703703703703706,
            "acc_stderr": 0.04284958639753399,
            "acc_norm": 0.43703703703703706,
            "acc_norm_stderr": 0.04284958639753399
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384741,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384741
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.502127659574468,
            "acc_stderr": 0.03268572658667492,
            "acc_norm": 0.502127659574468,
            "acc_norm_stderr": 0.03268572658667492
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4397590361445783,
            "acc_stderr": 0.03864139923699121,
            "acc_norm": 0.4397590361445783,
            "acc_norm_stderr": 0.03864139923699121
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5916398713826366,
            "acc_stderr": 0.027917050748484627,
            "acc_norm": 0.5916398713826366,
            "acc_norm_stderr": 0.027917050748484627
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5022421524663677,
            "acc_stderr": 0.033557465352232634,
            "acc_norm": 0.5022421524663677,
            "acc_norm_stderr": 0.033557465352232634
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.6412213740458015,
            "acc_stderr": 0.04206739313864908,
            "acc_norm": 0.6412213740458015,
            "acc_norm_stderr": 0.04206739313864908
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.7070707070707071,
            "acc_stderr": 0.03242497958178817,
            "acc_norm": 0.7070707070707071,
            "acc_norm_stderr": 0.03242497958178817
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4896551724137931,
            "acc_stderr": 0.04165774775728762,
            "acc_norm": 0.4896551724137931,
            "acc_norm_stderr": 0.04165774775728762
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237655,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237655
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5462184873949579,
            "acc_stderr": 0.032339434681820885,
            "acc_norm": 0.5462184873949579,
            "acc_norm_stderr": 0.032339434681820885
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5333333333333333,
            "acc_stderr": 0.025294608023986455,
            "acc_norm": 0.5333333333333333,
            "acc_norm_stderr": 0.025294608023986455
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.57,
            "acc_stderr": 0.04975698519562429,
            "acc_norm": 0.57,
            "acc_norm_stderr": 0.04975698519562429
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6296296296296297,
            "acc_stderr": 0.04668408033024931,
            "acc_norm": 0.6296296296296297,
            "acc_norm_stderr": 0.04668408033024931
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3497536945812808,
            "acc_stderr": 0.03355400904969566,
            "acc_norm": 0.3497536945812808,
            "acc_norm_stderr": 0.03355400904969566
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5774193548387097,
            "acc_stderr": 0.02810096472427264,
            "acc_norm": 0.5774193548387097,
            "acc_norm_stderr": 0.02810096472427264
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7521367521367521,
            "acc_stderr": 0.028286324075564404,
            "acc_norm": 0.7521367521367521,
            "acc_norm_stderr": 0.028286324075564404
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5735849056603773,
            "acc_stderr": 0.030437794342983042,
            "acc_norm": 0.5735849056603773,
            "acc_norm_stderr": 0.030437794342983042
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5181818181818182,
            "acc_stderr": 0.04785964010794916,
            "acc_norm": 0.5181818181818182,
            "acc_norm_stderr": 0.04785964010794916
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.31851851851851853,
            "acc_stderr": 0.02840653309060846,
            "acc_norm": 0.31851851851851853,
            "acc_norm_stderr": 0.02840653309060846
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.037345356767871984,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.037345356767871984
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.681592039800995,
            "acc_stderr": 0.032941184790540944,
            "acc_norm": 0.681592039800995,
            "acc_norm_stderr": 0.032941184790540944
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4682080924855491,
            "acc_stderr": 0.03804749744364764,
            "acc_norm": 0.4682080924855491,
            "acc_norm_stderr": 0.03804749744364764
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.024677862841332786,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.024677862841332786
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4027777777777778,
            "acc_stderr": 0.04101405519842425,
            "acc_norm": 0.4027777777777778,
            "acc_norm_stderr": 0.04101405519842425
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.75,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.75,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5664739884393064,
            "acc_stderr": 0.026680134761679217,
            "acc_norm": 0.5664739884393064,
            "acc_norm_stderr": 0.026680134761679217
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.44785276073619634,
            "acc_stderr": 0.03906947479456601,
            "acc_norm": 0.44785276073619634,
            "acc_norm_stderr": 0.03906947479456601
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5462962962962963,
            "acc_stderr": 0.0277012284685426,
            "acc_norm": 0.5462962962962963,
            "acc_norm_stderr": 0.0277012284685426
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.6010362694300518,
            "acc_stderr": 0.03533999094065696,
            "acc_norm": 0.6010362694300518,
            "acc_norm_stderr": 0.03533999094065696
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04434600701584925,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04434600701584925
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6330275229357798,
            "acc_stderr": 0.020664675659520536,
            "acc_norm": 0.6330275229357798,
            "acc_norm_stderr": 0.020664675659520536
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.037184890068181146,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.037184890068181146
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5849673202614379,
            "acc_stderr": 0.028213504177824093,
            "acc_norm": 0.5849673202614379,
            "acc_norm_stderr": 0.028213504177824093
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.52,
            "acc_stderr": 0.05021167315686779,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.05021167315686779
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6694214876033058,
            "acc_stderr": 0.04294340845212094,
            "acc_norm": 0.6694214876033058,
            "acc_norm_stderr": 0.04294340845212094
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5263157894736842,
            "acc_stderr": 0.040633027314866725,
            "acc_norm": 0.5263157894736842,
            "acc_norm_stderr": 0.040633027314866725
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4411764705882353,
            "acc_stderr": 0.02008736207670285,
            "acc_norm": 0.4411764705882353,
            "acc_norm_stderr": 0.02008736207670285
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.32269503546099293,
            "acc_stderr": 0.02788913930053479,
            "acc_norm": 0.32269503546099293,
            "acc_norm_stderr": 0.02788913930053479
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.39285714285714285,
            "acc_stderr": 0.04635550135609976,
            "acc_norm": 0.39285714285714285,
            "acc_norm_stderr": 0.04635550135609976
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4212962962962963,
            "acc_stderr": 0.03367462138896078,
            "acc_norm": 0.4212962962962963,
            "acc_norm_stderr": 0.03367462138896078
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.19106145251396647,
            "acc_stderr": 0.013148479802450798,
            "acc_norm": 0.19106145251396647,
            "acc_norm_stderr": 0.013148479802450798
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5367647058823529,
            "acc_stderr": 0.030290619180485694,
            "acc_norm": 0.5367647058823529,
            "acc_norm_stderr": 0.030290619180485694
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5918367346938775,
            "acc_stderr": 0.03146465712827424,
            "acc_norm": 0.5918367346938775,
            "acc_norm_stderr": 0.03146465712827424
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6835443037974683,
            "acc_stderr": 0.030274974880218974,
            "acc_norm": 0.6835443037974683,
            "acc_norm_stderr": 0.030274974880218974
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.35984354628422427,
            "acc_stderr": 0.012258260483689805,
            "acc_norm": 0.35984354628422427,
            "acc_norm_stderr": 0.012258260483689805
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6421568627450981,
            "acc_stderr": 0.03364487286088299,
            "acc_norm": 0.6421568627450981,
            "acc_norm_stderr": 0.03364487286088299
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6303030303030303,
            "acc_stderr": 0.03769430314512568,
            "acc_norm": 0.6303030303030303,
            "acc_norm_stderr": 0.03769430314512568
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2558139534883721,
            "mc1_stderr": 0.015274176219283344,
            "mc2": 0.3960929779706412,
            "mc2_stderr": 0.014964127725897232
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3140495867768595,
            "acc_stderr": 0.01595733243429507,
            "acc_norm": 0.3612750885478158,
            "acc_norm_stderr": 0.016515463022412007
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "ENERGY-DRINK-LOVE/leaderboard_inst_v1.5_LDCC-SOLAR-10.7B_SFT",
        "model_sha": "a209a3297068a834c50c3141d8dc56cd78754280",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
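
For convenience, the per-task scores above can be summarized with a short script. The sketch below is a minimal, illustrative example only: the local filename result.json is an assumption, and the unweighted macro-average over the ko_mmlu_* subtasks is a simple convenience summary, not necessarily the leaderboard's official aggregation.

import json

# Load the evaluation output (the filename is an assumption for illustration).
with open("result.json", encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Unweighted macro-average of acc_norm over the ko_mmlu_* subtasks.
mmlu = [v["acc_norm"] for k, v in results.items() if k.startswith("harness|ko_mmlu_")]
print(f"ko_mmlu subtasks: {len(mmlu)}, mean acc_norm: {sum(mmlu) / len(mmlu):.4f}")

# Headline scores for the remaining tasks.
print("ko_arc_challenge acc_norm:", results["harness|ko_arc_challenge|25"]["acc_norm"])
print("ko_hellaswag acc_norm:", results["harness|ko_hellaswag|10"]["acc_norm"])
print("ko_truthfulqa mc2:", results["harness|ko_truthfulqa_mc|0"]["mc2"])
print("ko_commongen_v2 acc_norm:", results["harness|ko_commongen_v2|2"]["acc_norm"])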