{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2098976109215017,
            "acc_stderr": 0.01190054874804745,
            "acc_norm": 0.2593856655290102,
            "acc_norm_stderr": 0.012808273573927092
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3014339772953595,
            "acc_stderr": 0.004579429184835869,
            "acc_norm": 0.3571001792471619,
            "acc_norm_stderr": 0.004781654610857135
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.036155076303109344,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.036155076303109344
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2815533980582524,
            "acc_stderr": 0.04453254836326467,
            "acc_norm": 0.2815533980582524,
            "acc_norm_stderr": 0.04453254836326467
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3065134099616858,
            "acc_stderr": 0.016486952893041515,
            "acc_norm": 0.3065134099616858,
            "acc_norm_stderr": 0.016486952893041515
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2814814814814815,
            "acc_stderr": 0.03885004245800254,
            "acc_norm": 0.2814814814814815,
            "acc_norm_stderr": 0.03885004245800254
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3148936170212766,
            "acc_stderr": 0.03036358219723816,
            "acc_norm": 0.3148936170212766,
            "acc_norm_stderr": 0.03036358219723816
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.28313253012048195,
            "acc_stderr": 0.03507295431370519,
            "acc_norm": 0.28313253012048195,
            "acc_norm_stderr": 0.03507295431370519
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3536977491961415,
            "acc_stderr": 0.02715520810320086,
            "acc_norm": 0.3536977491961415,
            "acc_norm_stderr": 0.02715520810320086
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.27802690582959644,
            "acc_stderr": 0.03006958487449405,
            "acc_norm": 0.27802690582959644,
            "acc_norm_stderr": 0.03006958487449405
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.31297709923664124,
            "acc_stderr": 0.04066962905677697,
            "acc_norm": 0.31297709923664124,
            "acc_norm_stderr": 0.04066962905677697
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.29797979797979796,
            "acc_stderr": 0.03258630383836556,
            "acc_norm": 0.29797979797979796,
            "acc_norm_stderr": 0.03258630383836556
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3310344827586207,
            "acc_stderr": 0.03921545312467121,
            "acc_norm": 0.3310344827586207,
            "acc_norm_stderr": 0.03921545312467121
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.04158307533083286,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.04158307533083286
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3235294117647059,
            "acc_stderr": 0.030388353551886838,
            "acc_norm": 0.3235294117647059,
            "acc_norm_stderr": 0.030388353551886838
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.33076923076923076,
            "acc_stderr": 0.0238547956809711,
            "acc_norm": 0.33076923076923076,
            "acc_norm_stderr": 0.0238547956809711
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.39814814814814814,
            "acc_stderr": 0.04732332615978814,
            "acc_norm": 0.39814814814814814,
            "acc_norm_stderr": 0.04732332615978814
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2660098522167488,
            "acc_stderr": 0.031089826002937523,
            "acc_norm": 0.2660098522167488,
            "acc_norm_stderr": 0.031089826002937523
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.33548387096774196,
            "acc_stderr": 0.02686020644472434,
            "acc_norm": 0.33548387096774196,
            "acc_norm_stderr": 0.02686020644472434
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.46153846153846156,
            "acc_stderr": 0.03265903381186195,
            "acc_norm": 0.46153846153846156,
            "acc_norm_stderr": 0.03265903381186195
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.27169811320754716,
            "acc_stderr": 0.027377706624670716,
            "acc_norm": 0.27169811320754716,
            "acc_norm_stderr": 0.027377706624670716
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.3,
            "acc_stderr": 0.04389311454644286,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.04389311454644286
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.21851851851851853,
            "acc_stderr": 0.025195752251823793,
            "acc_norm": 0.21851851851851853,
            "acc_norm_stderr": 0.025195752251823793
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2582781456953642,
            "acc_stderr": 0.035737053147634576,
            "acc_norm": 0.2582781456953642,
            "acc_norm_stderr": 0.035737053147634576
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.3283582089552239,
            "acc_stderr": 0.033206858897443244,
            "acc_norm": 0.3283582089552239,
            "acc_norm_stderr": 0.033206858897443244
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.27167630057803466,
            "acc_stderr": 0.03391750322321659,
            "acc_norm": 0.27167630057803466,
            "acc_norm_stderr": 0.03391750322321659
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.23544973544973544,
            "acc_stderr": 0.021851509822031705,
            "acc_norm": 0.23544973544973544,
            "acc_norm_stderr": 0.021851509822031705
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.03745554791462457,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.03745554791462457
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.35260115606936415,
            "acc_stderr": 0.025722802200895817,
            "acc_norm": 0.35260115606936415,
            "acc_norm_stderr": 0.025722802200895817
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.2883435582822086,
            "acc_stderr": 0.035590395316173425,
            "acc_norm": 0.2883435582822086,
            "acc_norm_stderr": 0.035590395316173425
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.30864197530864196,
            "acc_stderr": 0.025702640260603753,
            "acc_norm": 0.30864197530864196,
            "acc_norm_stderr": 0.025702640260603753
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.3316062176165803,
            "acc_stderr": 0.03397636541089117,
            "acc_norm": 0.3316062176165803,
            "acc_norm_stderr": 0.03397636541089117
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03947152782669415,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03947152782669415
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.29724770642201837,
            "acc_stderr": 0.019595707224643533,
            "acc_norm": 0.29724770642201837,
            "acc_norm_stderr": 0.019595707224643533
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.042163702135578345,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.042163702135578345
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3790849673202614,
            "acc_stderr": 0.02778014120702335,
            "acc_norm": 0.3790849673202614,
            "acc_norm_stderr": 0.02778014120702335
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.45454545454545453,
            "acc_stderr": 0.04545454545454546,
            "acc_norm": 0.45454545454545453,
            "acc_norm_stderr": 0.04545454545454546
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.2894736842105263,
            "acc_stderr": 0.03690677986137282,
            "acc_norm": 0.2894736842105263,
            "acc_norm_stderr": 0.03690677986137282
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2826797385620915,
            "acc_stderr": 0.018217269552053446,
            "acc_norm": 0.2826797385620915,
            "acc_norm_stderr": 0.018217269552053446
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.26595744680851063,
            "acc_stderr": 0.026358065698880585,
            "acc_norm": 0.26595744680851063,
            "acc_norm_stderr": 0.026358065698880585
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.04547960999764376,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.04547960999764376
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3287037037037037,
            "acc_stderr": 0.032036140846700596,
            "acc_norm": 0.3287037037037037,
            "acc_norm_stderr": 0.032036140846700596
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27150837988826815,
            "acc_stderr": 0.014874252168095278,
            "acc_norm": 0.27150837988826815,
            "acc_norm_stderr": 0.014874252168095278
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.2536764705882353,
            "acc_stderr": 0.026431329870789513,
            "acc_norm": 0.2536764705882353,
            "acc_norm_stderr": 0.026431329870789513
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.44081632653061226,
            "acc_stderr": 0.03178419114175363,
            "acc_norm": 0.44081632653061226,
            "acc_norm_stderr": 0.03178419114175363
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.29535864978902954,
            "acc_stderr": 0.029696338713422893,
            "acc_norm": 0.29535864978902954,
            "acc_norm_stderr": 0.029696338713422893
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.26140808344198174,
            "acc_stderr": 0.011222528169771312,
            "acc_norm": 0.26140808344198174,
            "acc_norm_stderr": 0.011222528169771312
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.29411764705882354,
            "acc_stderr": 0.031980016601150706,
            "acc_norm": 0.29411764705882354,
            "acc_norm_stderr": 0.031980016601150706
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.30303030303030304,
            "acc_stderr": 0.03588624800091709,
            "acc_norm": 0.30303030303030304,
            "acc_norm_stderr": 0.03588624800091709
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.27050183598531213,
            "mc1_stderr": 0.01555077833284288,
            "mc2": 0.43560981343267496,
            "mc2_stderr": 0.01587676917939091
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.22077922077922077,
            "acc_stderr": 0.014260152803540045,
            "acc_norm": 0.3435655253837072,
            "acc_norm_stderr": 0.016327334806429145
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "huggyllama/llama-13b",
        "model_sha": "bf57045473f207bb1de1ed035ace226f4d9f9bba",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}