{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.35238907849829354,
            "acc_stderr": 0.013960142600598678,
            "acc_norm": 0.3993174061433447,
            "acc_norm_stderr": 0.014312094557946707
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.36914957179844654,
            "acc_stderr": 0.004815882719278385,
            "acc_norm": 0.469627564230233,
            "acc_norm_stderr": 0.004980566907790455
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.47368421052631576,
            "acc_stderr": 0.038295098689947286,
            "acc_norm": 0.47368421052631576,
            "acc_norm_stderr": 0.038295098689947286
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5048543689320388,
            "acc_stderr": 0.04950504382128921,
            "acc_norm": 0.5048543689320388,
            "acc_norm_stderr": 0.04950504382128921
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.438058748403576,
            "acc_stderr": 0.017742232238257223,
            "acc_norm": 0.438058748403576,
            "acc_norm_stderr": 0.017742232238257223
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4,
            "acc_stderr": 0.042320736951515885,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.042320736951515885
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3446808510638298,
            "acc_stderr": 0.031068985963122155,
            "acc_norm": 0.3446808510638298,
            "acc_norm_stderr": 0.031068985963122155
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3855421686746988,
            "acc_stderr": 0.03789134424611548,
            "acc_norm": 0.3855421686746988,
            "acc_norm_stderr": 0.03789134424611548
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4758842443729904,
            "acc_stderr": 0.028365041542564563,
            "acc_norm": 0.4758842443729904,
            "acc_norm_stderr": 0.028365041542564563
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.47085201793721976,
            "acc_stderr": 0.03350073248773404,
            "acc_norm": 0.47085201793721976,
            "acc_norm_stderr": 0.03350073248773404
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4732824427480916,
            "acc_stderr": 0.04379024936553894,
            "acc_norm": 0.4732824427480916,
            "acc_norm_stderr": 0.04379024936553894
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5353535353535354,
            "acc_stderr": 0.03553436368828064,
            "acc_norm": 0.5353535353535354,
            "acc_norm_stderr": 0.03553436368828064
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.47586206896551725,
            "acc_stderr": 0.041618085035015295,
            "acc_norm": 0.47586206896551725,
            "acc_norm_stderr": 0.041618085035015295
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.04336432707993177,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.04336432707993177
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.46218487394957986,
            "acc_stderr": 0.032385469487589795,
            "acc_norm": 0.46218487394957986,
            "acc_norm_stderr": 0.032385469487589795
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4230769230769231,
            "acc_stderr": 0.025049197876042335,
            "acc_norm": 0.4230769230769231,
            "acc_norm_stderr": 0.025049197876042335
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.55,
            "acc_stderr": 0.049999999999999996,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.049999999999999996
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.04803752235190192,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.04803752235190192
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.37438423645320196,
            "acc_stderr": 0.03405155380561952,
            "acc_norm": 0.37438423645320196,
            "acc_norm_stderr": 0.03405155380561952
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.44193548387096776,
            "acc_stderr": 0.02825155790684974,
            "acc_norm": 0.44193548387096776,
            "acc_norm_stderr": 0.02825155790684974
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6837606837606838,
            "acc_stderr": 0.03046365674734027,
            "acc_norm": 0.6837606837606838,
            "acc_norm_stderr": 0.03046365674734027
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.44150943396226416,
            "acc_stderr": 0.030561590426731833,
            "acc_norm": 0.44150943396226416,
            "acc_norm_stderr": 0.030561590426731833
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.45454545454545453,
            "acc_stderr": 0.04769300568972743,
            "acc_norm": 0.45454545454545453,
            "acc_norm_stderr": 0.04769300568972743
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2740740740740741,
            "acc_stderr": 0.027195934804085622,
            "acc_norm": 0.2740740740740741,
            "acc_norm_stderr": 0.027195934804085622
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.31788079470198677,
            "acc_stderr": 0.038020397601079024,
            "acc_norm": 0.31788079470198677,
            "acc_norm_stderr": 0.038020397601079024
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5572139303482587,
            "acc_stderr": 0.03512310964123936,
            "acc_norm": 0.5572139303482587,
            "acc_norm_stderr": 0.03512310964123936
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3930635838150289,
            "acc_stderr": 0.0372424959581773,
            "acc_norm": 0.3930635838150289,
            "acc_norm_stderr": 0.0372424959581773
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.35185185185185186,
            "acc_stderr": 0.024594975128920938,
            "acc_norm": 0.35185185185185186,
            "acc_norm_stderr": 0.024594975128920938
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3402777777777778,
            "acc_stderr": 0.03962135573486219,
            "acc_norm": 0.3402777777777778,
            "acc_norm_stderr": 0.03962135573486219
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.67,
            "acc_stderr": 0.047258156262526066,
            "acc_norm": 0.67,
            "acc_norm_stderr": 0.047258156262526066
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.45375722543352603,
            "acc_stderr": 0.026803720583206188,
            "acc_norm": 0.45375722543352603,
            "acc_norm_stderr": 0.026803720583206188
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3987730061349693,
            "acc_stderr": 0.038470214204560246,
            "acc_norm": 0.3987730061349693,
            "acc_norm_stderr": 0.038470214204560246
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4166666666666667,
            "acc_stderr": 0.027431623722415015,
            "acc_norm": 0.4166666666666667,
            "acc_norm_stderr": 0.027431623722415015
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5025906735751295,
            "acc_stderr": 0.03608390745384487,
            "acc_norm": 0.5025906735751295,
            "acc_norm_stderr": 0.03608390745384487
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.04227054451232199,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.04227054451232199
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.45871559633027525,
            "acc_stderr": 0.02136412253388169,
            "acc_norm": 0.45871559633027525,
            "acc_norm_stderr": 0.02136412253388169
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.36507936507936506,
            "acc_stderr": 0.043062412591271526,
            "acc_norm": 0.36507936507936506,
            "acc_norm_stderr": 0.043062412591271526
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5032679738562091,
            "acc_stderr": 0.028629305194003543,
            "acc_norm": 0.5032679738562091,
            "acc_norm_stderr": 0.028629305194003543
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5785123966942148,
            "acc_stderr": 0.04507732278775089,
            "acc_norm": 0.5785123966942148,
            "acc_norm_stderr": 0.04507732278775089
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4407894736842105,
            "acc_stderr": 0.04040311062490436,
            "acc_norm": 0.4407894736842105,
            "acc_norm_stderr": 0.04040311062490436
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.37254901960784315,
            "acc_stderr": 0.019559646809215937,
            "acc_norm": 0.37254901960784315,
            "acc_norm_stderr": 0.019559646809215937
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3120567375886525,
            "acc_stderr": 0.027640120545169927,
            "acc_norm": 0.3120567375886525,
            "acc_norm_stderr": 0.027640120545169927
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4017857142857143,
            "acc_stderr": 0.04653333146973646,
            "acc_norm": 0.4017857142857143,
            "acc_norm_stderr": 0.04653333146973646
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4305555555555556,
            "acc_stderr": 0.03376922151252335,
            "acc_norm": 0.4305555555555556,
            "acc_norm_stderr": 0.03376922151252335
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27262569832402234,
            "acc_stderr": 0.014893391735249603,
            "acc_norm": 0.27262569832402234,
            "acc_norm_stderr": 0.014893391735249603
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620333,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620333
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.41911764705882354,
            "acc_stderr": 0.029972807170464626,
            "acc_norm": 0.41911764705882354,
            "acc_norm_stderr": 0.029972807170464626
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4897959183673469,
            "acc_stderr": 0.032002553478937816,
            "acc_norm": 0.4897959183673469,
            "acc_norm_stderr": 0.032002553478937816
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5738396624472574,
            "acc_stderr": 0.03219035703131774,
            "acc_norm": 0.5738396624472574,
            "acc_norm_stderr": 0.03219035703131774
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3109517601043025,
            "acc_stderr": 0.011822252917799201,
            "acc_norm": 0.3109517601043025,
            "acc_norm_stderr": 0.011822252917799201
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.43137254901960786,
            "acc_stderr": 0.03476099060501636,
            "acc_norm": 0.43137254901960786,
            "acc_norm_stderr": 0.03476099060501636
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3575757575757576,
            "acc_stderr": 0.03742597043806587,
            "acc_norm": 0.3575757575757576,
            "acc_norm_stderr": 0.03742597043806587
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3047735618115055,
            "mc1_stderr": 0.01611412415688246,
            "mc2": 0.4639508584743867,
            "mc2_stderr": 0.015654961139140635
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3778040141676505,
            "acc_stderr": 0.016669082840694967,
            "acc_norm": 0.5017709563164109,
            "acc_norm_stderr": 0.01719024627623186
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "LI-ST/Mistral-7B-ko-v0.005",
        "model_sha": "bfa2a68f96a69f45defb220e2bf4ffc31e6633f1",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}