|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.19795221843003413,
            "acc_stderr": 0.011643990971573401,
            "acc_norm": 0.26535836177474403,
            "acc_norm_stderr": 0.012902554762313962
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.2633937462656841,
            "acc_stderr": 0.004395739495688583,
            "acc_norm": 0.27823142800239,
            "acc_norm_stderr": 0.004472121485161932
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03218093795602357,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03218093795602357
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.20388349514563106,
            "acc_stderr": 0.03989139859531771,
            "acc_norm": 0.20388349514563106,
            "acc_norm_stderr": 0.03989139859531771
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2669220945083014,
            "acc_stderr": 0.015818450894777552,
            "acc_norm": 0.2669220945083014,
            "acc_norm_stderr": 0.015818450894777552
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.22962962962962963,
            "acc_stderr": 0.03633384414073463,
            "acc_norm": 0.22962962962962963,
            "acc_norm_stderr": 0.03633384414073463
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.24680851063829787,
            "acc_stderr": 0.028185441301234102,
            "acc_norm": 0.24680851063829787,
            "acc_norm_stderr": 0.028185441301234102
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.29518072289156627,
            "acc_stderr": 0.0355092018568963,
            "acc_norm": 0.29518072289156627,
            "acc_norm_stderr": 0.0355092018568963
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.26688102893890675,
            "acc_stderr": 0.025122637608816657,
            "acc_norm": 0.26688102893890675,
            "acc_norm_stderr": 0.025122637608816657
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.2556053811659193,
            "acc_stderr": 0.029275891003969923,
            "acc_norm": 0.2556053811659193,
            "acc_norm_stderr": 0.029275891003969923
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2366412213740458,
            "acc_stderr": 0.037276735755969195,
            "acc_norm": 0.2366412213740458,
            "acc_norm_stderr": 0.037276735755969195
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.2474747474747475,
            "acc_stderr": 0.03074630074212451,
            "acc_norm": 0.2474747474747475,
            "acc_norm_stderr": 0.03074630074212451
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2620689655172414,
            "acc_stderr": 0.036646663372252565,
            "acc_norm": 0.2620689655172414,
            "acc_norm_stderr": 0.036646663372252565
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.17647058823529413,
            "acc_stderr": 0.03793281185307811,
            "acc_norm": 0.17647058823529413,
            "acc_norm_stderr": 0.03793281185307811
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3403361344537815,
            "acc_stderr": 0.030778057422931666,
            "acc_norm": 0.3403361344537815,
            "acc_norm_stderr": 0.030778057422931666
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.30512820512820515,
            "acc_stderr": 0.023346335293325887,
            "acc_norm": 0.30512820512820515,
            "acc_norm_stderr": 0.023346335293325887
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.17,
            "acc_stderr": 0.03775251680686371,
            "acc_norm": 0.17,
            "acc_norm_stderr": 0.03775251680686371
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.15,
            "acc_stderr": 0.03588702812826371,
            "acc_norm": 0.15,
            "acc_norm_stderr": 0.03588702812826371
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.040191074725573483,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.040191074725573483
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.32019704433497537,
            "acc_stderr": 0.032826493853041504,
            "acc_norm": 0.32019704433497537,
            "acc_norm_stderr": 0.032826493853041504
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.31290322580645163,
            "acc_stderr": 0.026377567028645858,
            "acc_norm": 0.31290322580645163,
            "acc_norm_stderr": 0.026377567028645858
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.19230769230769232,
            "acc_stderr": 0.025819233256483727,
            "acc_norm": 0.19230769230769232,
            "acc_norm_stderr": 0.025819233256483727
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.22641509433962265,
            "acc_stderr": 0.025757559893106727,
            "acc_norm": 0.22641509433962265,
            "acc_norm_stderr": 0.025757559893106727
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.23636363636363636,
            "acc_stderr": 0.04069306319721376,
            "acc_norm": 0.23636363636363636,
            "acc_norm_stderr": 0.04069306319721376
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25555555555555554,
            "acc_stderr": 0.02659393910184408,
            "acc_norm": 0.25555555555555554,
            "acc_norm_stderr": 0.02659393910184408
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33112582781456956,
            "acc_stderr": 0.038425817186598696,
            "acc_norm": 0.33112582781456956,
            "acc_norm_stderr": 0.038425817186598696
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.23880597014925373,
            "acc_stderr": 0.030147775935409224,
            "acc_norm": 0.23880597014925373,
            "acc_norm_stderr": 0.030147775935409224
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.21965317919075145,
            "acc_stderr": 0.031568093627031744,
            "acc_norm": 0.21965317919075145,
            "acc_norm_stderr": 0.031568093627031744
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.25396825396825395,
            "acc_stderr": 0.022418042891113946,
            "acc_norm": 0.25396825396825395,
            "acc_norm_stderr": 0.022418042891113946
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2638888888888889,
            "acc_stderr": 0.03685651095897532,
            "acc_norm": 0.2638888888888889,
            "acc_norm_stderr": 0.03685651095897532
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.23699421965317918,
            "acc_stderr": 0.022894082489925992,
            "acc_norm": 0.23699421965317918,
            "acc_norm_stderr": 0.022894082489925992
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.27607361963190186,
            "acc_stderr": 0.0351238528370505,
            "acc_norm": 0.27607361963190186,
            "acc_norm_stderr": 0.0351238528370505
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2654320987654321,
            "acc_stderr": 0.02456922360046085,
            "acc_norm": 0.2654320987654321,
            "acc_norm_stderr": 0.02456922360046085
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.30569948186528495,
            "acc_stderr": 0.03324837939758159,
            "acc_norm": 0.30569948186528495,
            "acc_norm_stderr": 0.03324837939758159
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2631578947368421,
            "acc_stderr": 0.0414243971948936,
            "acc_norm": 0.2631578947368421,
            "acc_norm_stderr": 0.0414243971948936
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.23302752293577983,
            "acc_stderr": 0.018125669180861514,
            "acc_norm": 0.23302752293577983,
            "acc_norm_stderr": 0.018125669180861514
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.16666666666666666,
            "acc_stderr": 0.03333333333333337,
            "acc_norm": 0.16666666666666666,
            "acc_norm_stderr": 0.03333333333333337
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.24183006535947713,
            "acc_stderr": 0.024518195641879334,
            "acc_norm": 0.24183006535947713,
            "acc_norm_stderr": 0.024518195641879334
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.2975206611570248,
            "acc_stderr": 0.04173349148083497,
            "acc_norm": 0.2975206611570248,
            "acc_norm_stderr": 0.04173349148083497
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.23026315789473684,
            "acc_stderr": 0.034260594244031654,
            "acc_norm": 0.23026315789473684,
            "acc_norm_stderr": 0.034260594244031654
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.21895424836601307,
            "acc_stderr": 0.016729937565537537,
            "acc_norm": 0.21895424836601307,
            "acc_norm_stderr": 0.016729937565537537
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.24822695035460993,
            "acc_stderr": 0.025770015644290396,
            "acc_norm": 0.24822695035460993,
            "acc_norm_stderr": 0.025770015644290396
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.04287858751340456,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.04287858751340456
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4398148148148148,
            "acc_stderr": 0.033851779760448106,
            "acc_norm": 0.4398148148148148,
            "acc_norm_stderr": 0.033851779760448106
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24581005586592178,
            "acc_stderr": 0.01440029642922561,
            "acc_norm": 0.24581005586592178,
            "acc_norm_stderr": 0.01440029642922561
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252604,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252604
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.44485294117647056,
            "acc_stderr": 0.03018753206032938,
            "acc_norm": 0.44485294117647056,
            "acc_norm_stderr": 0.03018753206032938
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.23265306122448978,
            "acc_stderr": 0.02704925791589618,
            "acc_norm": 0.23265306122448978,
            "acc_norm_stderr": 0.02704925791589618
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2320675105485232,
            "acc_stderr": 0.02747974455080851,
            "acc_norm": 0.2320675105485232,
            "acc_norm_stderr": 0.02747974455080851
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2301173402868318,
            "acc_stderr": 0.010750183177375553,
            "acc_norm": 0.2301173402868318,
            "acc_norm_stderr": 0.010750183177375553
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.25,
            "acc_stderr": 0.03039153369274154,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.03039153369274154
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.22424242424242424,
            "acc_stderr": 0.03256866661681102,
            "acc_norm": 0.22424242424242424,
            "acc_norm_stderr": 0.03256866661681102
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2741738066095471,
            "mc1_stderr": 0.015616518497219381,
            "mc2": 0.538620436654127,
            "mc2_stderr": 0.016366108934105512
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.15230224321133412,
            "acc_stderr": 0.01235345636132145,
            "acc_norm": 0.3742621015348288,
            "acc_norm_stderr": 0.016637917789798735
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "FINDA-FIT/llama-ko-7b",
        "model_sha": "c1f0b9f20d38c9494e1607bd30ce43da570d9d52",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}