{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.21331058020477817,
            "acc_stderr": 0.011970971742326334,
            "acc_norm": 0.26023890784982934,
            "acc_norm_stderr": 0.012821930225112552
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.30302728540131446,
            "acc_stderr": 0.004586276903267076,
            "acc_norm": 0.3558056164110735,
            "acc_norm_stderr": 0.00477778258481779
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.36257309941520466,
            "acc_stderr": 0.036871306155620606,
            "acc_norm": 0.36257309941520466,
            "acc_norm_stderr": 0.036871306155620606
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.30097087378640774,
            "acc_stderr": 0.045416094465039476,
            "acc_norm": 0.30097087378640774,
            "acc_norm_stderr": 0.045416094465039476
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.30268199233716475,
            "acc_stderr": 0.01642878158174936,
            "acc_norm": 0.30268199233716475,
            "acc_norm_stderr": 0.01642878158174936
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.03820169914517905,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.03820169914517905
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.30638297872340425,
            "acc_stderr": 0.03013590647851756,
            "acc_norm": 0.30638297872340425,
            "acc_norm_stderr": 0.03013590647851756
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.21686746987951808,
            "acc_stderr": 0.03208284450356365,
            "acc_norm": 0.21686746987951808,
            "acc_norm_stderr": 0.03208284450356365
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3086816720257235,
            "acc_stderr": 0.026236965881153266,
            "acc_norm": 0.3086816720257235,
            "acc_norm_stderr": 0.026236965881153266
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.31390134529147984,
            "acc_stderr": 0.031146796482972465,
            "acc_norm": 0.31390134529147984,
            "acc_norm_stderr": 0.031146796482972465
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2748091603053435,
            "acc_stderr": 0.039153454088478354,
            "acc_norm": 0.2748091603053435,
            "acc_norm_stderr": 0.039153454088478354
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.31,
            "acc_stderr": 0.046482319871173156,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.046482319871173156
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.29292929292929293,
            "acc_stderr": 0.03242497958178815,
            "acc_norm": 0.29292929292929293,
            "acc_norm_stderr": 0.03242497958178815
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3448275862068966,
            "acc_stderr": 0.03960933549451209,
            "acc_norm": 0.3448275862068966,
            "acc_norm_stderr": 0.03960933549451209
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.040925639582376536,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.040925639582376536
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3277310924369748,
            "acc_stderr": 0.03048991141767323,
            "acc_norm": 0.3277310924369748,
            "acc_norm_stderr": 0.03048991141767323
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.3487179487179487,
            "acc_stderr": 0.02416278028401772,
            "acc_norm": 0.3487179487179487,
            "acc_norm_stderr": 0.02416278028401772
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.23,
            "acc_stderr": 0.042295258468165044,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.042295258468165044
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4537037037037037,
            "acc_stderr": 0.04812917324536823,
            "acc_norm": 0.4537037037037037,
            "acc_norm_stderr": 0.04812917324536823
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.29064039408866993,
            "acc_stderr": 0.0319474007226554,
            "acc_norm": 0.29064039408866993,
            "acc_norm_stderr": 0.0319474007226554
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3096774193548387,
            "acc_stderr": 0.026302774983517418,
            "acc_norm": 0.3096774193548387,
            "acc_norm_stderr": 0.026302774983517418
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.452991452991453,
            "acc_stderr": 0.032610998730986204,
            "acc_norm": 0.452991452991453,
            "acc_norm_stderr": 0.032610998730986204
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2792452830188679,
            "acc_stderr": 0.02761116340239972,
            "acc_norm": 0.2792452830188679,
            "acc_norm_stderr": 0.02761116340239972
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.32727272727272727,
            "acc_stderr": 0.04494290866252088,
            "acc_norm": 0.32727272727272727,
            "acc_norm_stderr": 0.04494290866252088
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.21481481481481482,
            "acc_stderr": 0.025040443877000693,
            "acc_norm": 0.21481481481481482,
            "acc_norm_stderr": 0.025040443877000693
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.26490066225165565,
            "acc_stderr": 0.03603038545360385,
            "acc_norm": 0.26490066225165565,
            "acc_norm_stderr": 0.03603038545360385
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.34328358208955223,
            "acc_stderr": 0.03357379665433431,
            "acc_norm": 0.34328358208955223,
            "acc_norm_stderr": 0.03357379665433431
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3063583815028902,
            "acc_stderr": 0.03514942551267437,
            "acc_norm": 0.3063583815028902,
            "acc_norm_stderr": 0.03514942551267437
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.25132275132275134,
            "acc_stderr": 0.022340482339643898,
            "acc_norm": 0.25132275132275134,
            "acc_norm_stderr": 0.022340482339643898
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2638888888888889,
            "acc_stderr": 0.03685651095897532,
            "acc_norm": 0.2638888888888889,
            "acc_norm_stderr": 0.03685651095897532
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909283,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909283
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3236994219653179,
            "acc_stderr": 0.0251901813276084,
            "acc_norm": 0.3236994219653179,
            "acc_norm_stderr": 0.0251901813276084
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.27607361963190186,
            "acc_stderr": 0.0351238528370505,
            "acc_norm": 0.27607361963190186,
            "acc_norm_stderr": 0.0351238528370505
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3271604938271605,
            "acc_stderr": 0.026105673861409814,
            "acc_norm": 0.3271604938271605,
            "acc_norm_stderr": 0.026105673861409814
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.3160621761658031,
            "acc_stderr": 0.033553973696861736,
            "acc_norm": 0.3160621761658031,
            "acc_norm_stderr": 0.033553973696861736
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.19298245614035087,
            "acc_stderr": 0.037124548537213684,
            "acc_norm": 0.19298245614035087,
            "acc_norm_stderr": 0.037124548537213684
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.30825688073394497,
            "acc_stderr": 0.01979836669836725,
            "acc_norm": 0.30825688073394497,
            "acc_norm_stderr": 0.01979836669836725
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.31746031746031744,
            "acc_stderr": 0.04163453031302859,
            "acc_norm": 0.31746031746031744,
            "acc_norm_stderr": 0.04163453031302859
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.369281045751634,
            "acc_stderr": 0.02763417668960266,
            "acc_norm": 0.369281045751634,
            "acc_norm_stderr": 0.02763417668960266
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.22,
            "acc_stderr": 0.041633319989322695,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.041633319989322695
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.4049586776859504,
            "acc_stderr": 0.044811377559424694,
            "acc_norm": 0.4049586776859504,
            "acc_norm_stderr": 0.044811377559424694
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.2631578947368421,
            "acc_stderr": 0.035834961763610625,
            "acc_norm": 0.2631578947368421,
            "acc_norm_stderr": 0.035834961763610625
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2875816993464052,
            "acc_stderr": 0.018311653053648222,
            "acc_norm": 0.2875816993464052,
            "acc_norm_stderr": 0.018311653053648222
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.24113475177304963,
            "acc_stderr": 0.025518731049537773,
            "acc_norm": 0.24113475177304963,
            "acc_norm_stderr": 0.025518731049537773
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.32142857142857145,
            "acc_stderr": 0.044328040552915206,
            "acc_norm": 0.32142857142857145,
            "acc_norm_stderr": 0.044328040552915206
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.33796296296296297,
            "acc_stderr": 0.03225941352631295,
            "acc_norm": 0.33796296296296297,
            "acc_norm_stderr": 0.03225941352631295
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.26033519553072626,
            "acc_stderr": 0.014676252009319463,
            "acc_norm": 0.26033519553072626,
            "acc_norm_stderr": 0.014676252009319463
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145632,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145632
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.02576725201085595,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.02576725201085595
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.37142857142857144,
            "acc_stderr": 0.030932858792789848,
            "acc_norm": 0.37142857142857144,
            "acc_norm_stderr": 0.030932858792789848
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.21940928270042195,
            "acc_stderr": 0.026939106581553945,
            "acc_norm": 0.21940928270042195,
            "acc_norm_stderr": 0.026939106581553945
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.26792698826597133,
            "acc_stderr": 0.011311347690633886,
            "acc_norm": 0.26792698826597133,
            "acc_norm_stderr": 0.011311347690633886
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.030190282453501943,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.030190282453501943
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2787878787878788,
            "acc_stderr": 0.03501438706296781,
            "acc_norm": 0.2787878787878788,
            "acc_norm_stderr": 0.03501438706296781
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2717258261933905,
            "mc1_stderr": 0.015572840452875835,
            "mc2": 0.43296733660801473,
            "mc2_stderr": 0.015927191551239974
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2476525821596244,
            "acc_stderr": 0.014796734034366533,
            "acc_norm": 0.29694835680751175,
            "acc_norm_stderr": 0.015662796197363153
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "heegyu/LIMA-13b-hf",
        "model_sha": "98faa74a9b41cbd9033904cd58420705936849eb",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}