|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.26109215017064846,
            "acc_stderr": 0.012835523909473864,
            "acc_norm": 0.32337883959044367,
            "acc_norm_stderr": 0.013669421630012123
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3641704839673372,
            "acc_stderr": 0.004802133511654235,
            "acc_norm": 0.45727942640908187,
            "acc_norm_stderr": 0.004971534874389935
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.21637426900584794,
            "acc_stderr": 0.031581495393387345,
            "acc_norm": 0.21637426900584794,
            "acc_norm_stderr": 0.031581495393387345
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2524271844660194,
            "acc_stderr": 0.04301250399690876,
            "acc_norm": 0.2524271844660194,
            "acc_norm_stderr": 0.04301250399690876
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2388250319284802,
            "acc_stderr": 0.015246803197398687,
            "acc_norm": 0.2388250319284802,
            "acc_norm_stderr": 0.015246803197398687
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2814814814814815,
            "acc_stderr": 0.03885004245800254,
            "acc_norm": 0.2814814814814815,
            "acc_norm_stderr": 0.03885004245800254
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2553191489361702,
            "acc_stderr": 0.028504856470514196,
            "acc_norm": 0.2553191489361702,
            "acc_norm_stderr": 0.028504856470514196
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3313253012048193,
            "acc_stderr": 0.03664314777288086,
            "acc_norm": 0.3313253012048193,
            "acc_norm_stderr": 0.03664314777288086
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.29260450160771706,
            "acc_stderr": 0.025839898334877983,
            "acc_norm": 0.29260450160771706,
            "acc_norm_stderr": 0.025839898334877983
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.21973094170403587,
            "acc_stderr": 0.0277901770643836,
            "acc_norm": 0.21973094170403587,
            "acc_norm_stderr": 0.0277901770643836
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.20610687022900764,
            "acc_stderr": 0.03547771004159463,
            "acc_norm": 0.20610687022900764,
            "acc_norm_stderr": 0.03547771004159463
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.22727272727272727,
            "acc_stderr": 0.029857515673386414,
            "acc_norm": 0.22727272727272727,
            "acc_norm_stderr": 0.029857515673386414
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.22758620689655173,
            "acc_stderr": 0.03493950380131184,
            "acc_norm": 0.22758620689655173,
            "acc_norm_stderr": 0.03493950380131184
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.18627450980392157,
            "acc_stderr": 0.03873958714149351,
            "acc_norm": 0.18627450980392157,
            "acc_norm_stderr": 0.03873958714149351
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.22268907563025211,
            "acc_stderr": 0.027025433498882367,
            "acc_norm": 0.22268907563025211,
            "acc_norm_stderr": 0.027025433498882367
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.24102564102564103,
            "acc_stderr": 0.021685546665333184,
            "acc_norm": 0.24102564102564103,
            "acc_norm_stderr": 0.021685546665333184
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.04330043749650742,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.04330043749650742
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.27586206896551724,
            "acc_stderr": 0.03144712581678245,
            "acc_norm": 0.27586206896551724,
            "acc_norm_stderr": 0.03144712581678245
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.2806451612903226,
            "acc_stderr": 0.025560604721022884,
            "acc_norm": 0.2806451612903226,
            "acc_norm_stderr": 0.025560604721022884
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.27350427350427353,
            "acc_stderr": 0.029202540153431194,
            "acc_norm": 0.27350427350427353,
            "acc_norm_stderr": 0.029202540153431194
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.26037735849056604,
            "acc_stderr": 0.0270087660907081,
            "acc_norm": 0.26037735849056604,
            "acc_norm_stderr": 0.0270087660907081
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.23636363636363636,
            "acc_stderr": 0.040693063197213775,
            "acc_norm": 0.23636363636363636,
            "acc_norm_stderr": 0.040693063197213775
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.02671924078371216,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.02671924078371216
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.23178807947019867,
            "acc_stderr": 0.03445406271987054,
            "acc_norm": 0.23178807947019867,
            "acc_norm_stderr": 0.03445406271987054
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.22885572139303484,
            "acc_stderr": 0.029705284056772436,
            "acc_norm": 0.22885572139303484,
            "acc_norm_stderr": 0.029705284056772436
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.21965317919075145,
            "acc_stderr": 0.031568093627031744,
            "acc_norm": 0.21965317919075145,
            "acc_norm_stderr": 0.031568093627031744
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.24603174603174602,
            "acc_stderr": 0.022182037202948368,
            "acc_norm": 0.24603174603174602,
            "acc_norm_stderr": 0.022182037202948368
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.03476590104304134,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.03476590104304134
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.2,
            "acc_stderr": 0.04020151261036845,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.04020151261036845
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.2543352601156069,
            "acc_stderr": 0.023445826276545546,
            "acc_norm": 0.2543352601156069,
            "acc_norm_stderr": 0.023445826276545546
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.24539877300613497,
            "acc_stderr": 0.03380939813943354,
            "acc_norm": 0.24539877300613497,
            "acc_norm_stderr": 0.03380939813943354
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.19444444444444445,
            "acc_stderr": 0.0220213661002202,
            "acc_norm": 0.19444444444444445,
            "acc_norm_stderr": 0.0220213661002202
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909283,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909283
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.24870466321243523,
            "acc_stderr": 0.031195840877700293,
            "acc_norm": 0.24870466321243523,
            "acc_norm_stderr": 0.031195840877700293
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.04049339297748141,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.04049339297748141
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.22568807339449543,
            "acc_stderr": 0.01792308766780305,
            "acc_norm": 0.22568807339449543,
            "acc_norm_stderr": 0.01792308766780305
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.039325376803928724,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.039325376803928724
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.023929155517351284,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.023929155517351284
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.19834710743801653,
            "acc_stderr": 0.03640118271990945,
            "acc_norm": 0.19834710743801653,
            "acc_norm_stderr": 0.03640118271990945
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.17763157894736842,
            "acc_stderr": 0.03110318238312337,
            "acc_norm": 0.17763157894736842,
            "acc_norm_stderr": 0.03110318238312337
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.01716058723504635,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.01716058723504635
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.20212765957446807,
            "acc_stderr": 0.02395666823785024,
            "acc_norm": 0.20212765957446807,
            "acc_norm_stderr": 0.02395666823785024
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.29464285714285715,
            "acc_stderr": 0.043270409325787296,
            "acc_norm": 0.29464285714285715,
            "acc_norm_stderr": 0.043270409325787296
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.03275773486100999,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.03275773486100999
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.22569832402234638,
            "acc_stderr": 0.013981395058455059,
            "acc_norm": 0.22569832402234638,
            "acc_norm_stderr": 0.013981395058455059
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.23,
            "acc_stderr": 0.042295258468165065,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.042295258468165065
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.25735294117647056,
            "acc_stderr": 0.026556519470041513,
            "acc_norm": 0.25735294117647056,
            "acc_norm_stderr": 0.026556519470041513
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.23673469387755103,
            "acc_stderr": 0.027212835884073167,
            "acc_norm": 0.23673469387755103,
            "acc_norm_stderr": 0.027212835884073167
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.27848101265822783,
            "acc_stderr": 0.029178682304842534,
            "acc_norm": 0.27848101265822783,
            "acc_norm_stderr": 0.029178682304842534
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2438070404172099,
            "acc_stderr": 0.010966507972178475,
            "acc_norm": 0.2438070404172099,
            "acc_norm_stderr": 0.010966507972178475
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.25,
            "acc_stderr": 0.03039153369274154,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.03039153369274154
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2787878787878788,
            "acc_stderr": 0.03501438706296781,
            "acc_norm": 0.2787878787878788,
            "acc_norm_stderr": 0.03501438706296781
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2423500611995104,
            "mc1_stderr": 0.015000674373570338,
            "mc2": 0.4144742012895836,
            "mc2_stderr": 0.015299571868403075
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.07042253521126761,
            "acc_stderr": 0.00877069161631731,
            "acc_norm": 0.10093896713615023,
            "acc_norm_stderr": 0.010326644717799555
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Nara-Lab/nallm-polyglot-ko-3.8b-base",
        "model_sha": "8d20c1e3d77f2a9a58046b58fb229c809476d350",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}