{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4044368600682594,
            "acc_stderr": 0.014342036483436177,
            "acc_norm": 0.46245733788395904,
            "acc_norm_stderr": 0.014570144495075576
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.42182832105158335,
            "acc_stderr": 0.004928420903026553,
            "acc_norm": 0.5668193586934873,
            "acc_norm_stderr": 0.0049450236570322765
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.49707602339181284,
            "acc_stderr": 0.03834759370936839,
            "acc_norm": 0.49707602339181284,
            "acc_norm_stderr": 0.03834759370936839
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4854368932038835,
            "acc_stderr": 0.04948637324026637,
            "acc_norm": 0.4854368932038835,
            "acc_norm_stderr": 0.04948637324026637
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.541507024265645,
            "acc_stderr": 0.017818248603465585,
            "acc_norm": 0.541507024265645,
            "acc_norm_stderr": 0.017818248603465585
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4888888888888889,
            "acc_stderr": 0.04318275491977976,
            "acc_norm": 0.4888888888888889,
            "acc_norm_stderr": 0.04318275491977976
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.40425531914893614,
            "acc_stderr": 0.032081157507886836,
            "acc_norm": 0.40425531914893614,
            "acc_norm_stderr": 0.032081157507886836
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.42168674698795183,
            "acc_stderr": 0.038444531817709175,
            "acc_norm": 0.42168674698795183,
            "acc_norm_stderr": 0.038444531817709175
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5048231511254019,
            "acc_stderr": 0.02839677044411129,
            "acc_norm": 0.5048231511254019,
            "acc_norm_stderr": 0.02839677044411129
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.484304932735426,
            "acc_stderr": 0.0335412657542081,
            "acc_norm": 0.484304932735426,
            "acc_norm_stderr": 0.0335412657542081
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.48854961832061067,
            "acc_stderr": 0.04384140024078016,
            "acc_norm": 0.48854961832061067,
            "acc_norm_stderr": 0.04384140024078016
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.44,
            "acc_stderr": 0.0498887651569859,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.0498887651569859
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5404040404040404,
            "acc_stderr": 0.035507024651313425,
            "acc_norm": 0.5404040404040404,
            "acc_norm_stderr": 0.035507024651313425
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.43448275862068964,
            "acc_stderr": 0.041307408795554966,
            "acc_norm": 0.43448275862068964,
            "acc_norm_stderr": 0.041307408795554966
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.28431372549019607,
            "acc_stderr": 0.04488482852329017,
            "acc_norm": 0.28431372549019607,
            "acc_norm_stderr": 0.04488482852329017
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.42436974789915966,
            "acc_stderr": 0.032104790510157764,
            "acc_norm": 0.42436974789915966,
            "acc_norm_stderr": 0.032104790510157764
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.44871794871794873,
            "acc_stderr": 0.025217315184846482,
            "acc_norm": 0.44871794871794873,
            "acc_norm_stderr": 0.025217315184846482
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.54,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.54,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.48148148148148145,
            "acc_stderr": 0.04830366024635331,
            "acc_norm": 0.48148148148148145,
            "acc_norm_stderr": 0.04830366024635331
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4187192118226601,
            "acc_stderr": 0.03471192860518468,
            "acc_norm": 0.4187192118226601,
            "acc_norm_stderr": 0.03471192860518468
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.432258064516129,
            "acc_stderr": 0.028181739720019416,
            "acc_norm": 0.432258064516129,
            "acc_norm_stderr": 0.028181739720019416
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6111111111111112,
            "acc_stderr": 0.03193705726200293,
            "acc_norm": 0.6111111111111112,
            "acc_norm_stderr": 0.03193705726200293
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.41132075471698115,
            "acc_stderr": 0.03028500925900979,
            "acc_norm": 0.41132075471698115,
            "acc_norm_stderr": 0.03028500925900979
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5363636363636364,
            "acc_stderr": 0.04776449162396197,
            "acc_norm": 0.5363636363636364,
            "acc_norm_stderr": 0.04776449162396197
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.026719240783712163,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.026719240783712163
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.304635761589404,
            "acc_stderr": 0.03757949922943343,
            "acc_norm": 0.304635761589404,
            "acc_norm_stderr": 0.03757949922943343
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6019900497512438,
            "acc_stderr": 0.034611994290400135,
            "acc_norm": 0.6019900497512438,
            "acc_norm_stderr": 0.034611994290400135
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4277456647398844,
            "acc_stderr": 0.03772446857518026,
            "acc_norm": 0.4277456647398844,
            "acc_norm_stderr": 0.03772446857518026
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.291005291005291,
            "acc_stderr": 0.02339382650048487,
            "acc_norm": 0.291005291005291,
            "acc_norm_stderr": 0.02339382650048487
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3541666666666667,
            "acc_stderr": 0.039994111357535424,
            "acc_norm": 0.3541666666666667,
            "acc_norm_stderr": 0.039994111357535424
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.71,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.71,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.49710982658959535,
            "acc_stderr": 0.02691864538323901,
            "acc_norm": 0.49710982658959535,
            "acc_norm_stderr": 0.02691864538323901
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5337423312883436,
            "acc_stderr": 0.039194155450484096,
            "acc_norm": 0.5337423312883436,
            "acc_norm_stderr": 0.039194155450484096
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4876543209876543,
            "acc_stderr": 0.027812262269327235,
            "acc_norm": 0.4876543209876543,
            "acc_norm_stderr": 0.027812262269327235
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.538860103626943,
            "acc_stderr": 0.03597524411734578,
            "acc_norm": 0.538860103626943,
            "acc_norm_stderr": 0.03597524411734578
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.04227054451232199,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.04227054451232199
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5853211009174312,
            "acc_stderr": 0.021122903208602592,
            "acc_norm": 0.5853211009174312,
            "acc_norm_stderr": 0.021122903208602592
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30952380952380953,
            "acc_stderr": 0.04134913018303316,
            "acc_norm": 0.30952380952380953,
            "acc_norm_stderr": 0.04134913018303316
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4150326797385621,
            "acc_stderr": 0.028213504177824093,
            "acc_norm": 0.4150326797385621,
            "acc_norm_stderr": 0.028213504177824093
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6776859504132231,
            "acc_stderr": 0.04266416363352167,
            "acc_norm": 0.6776859504132231,
            "acc_norm_stderr": 0.04266416363352167
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.40131578947368424,
            "acc_stderr": 0.03988903703336285,
            "acc_norm": 0.40131578947368424,
            "acc_norm_stderr": 0.03988903703336285
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.39869281045751637,
            "acc_stderr": 0.019808281317449848,
            "acc_norm": 0.39869281045751637,
            "acc_norm_stderr": 0.019808281317449848
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.36524822695035464,
            "acc_stderr": 0.028723863853281278,
            "acc_norm": 0.36524822695035464,
            "acc_norm_stderr": 0.028723863853281278
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2767857142857143,
            "acc_stderr": 0.04246624336697624,
            "acc_norm": 0.2767857142857143,
            "acc_norm_stderr": 0.04246624336697624
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.33796296296296297,
            "acc_stderr": 0.03225941352631295,
            "acc_norm": 0.33796296296296297,
            "acc_norm_stderr": 0.03225941352631295
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.25139664804469275,
            "acc_stderr": 0.014508979453553974,
            "acc_norm": 0.25139664804469275,
            "acc_norm_stderr": 0.014508979453553974
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.35294117647058826,
            "acc_stderr": 0.029029422815681417,
            "acc_norm": 0.35294117647058826,
            "acc_norm_stderr": 0.029029422815681417
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4897959183673469,
            "acc_stderr": 0.03200255347893782,
            "acc_norm": 0.4897959183673469,
            "acc_norm_stderr": 0.03200255347893782
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6413502109704642,
            "acc_stderr": 0.031219569445301847,
            "acc_norm": 0.6413502109704642,
            "acc_norm_stderr": 0.031219569445301847
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.36114732724902215,
            "acc_stderr": 0.01226793547751903,
            "acc_norm": 0.36114732724902215,
            "acc_norm_stderr": 0.01226793547751903
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5049019607843137,
            "acc_stderr": 0.035091433756067866,
            "acc_norm": 0.5049019607843137,
            "acc_norm_stderr": 0.035091433756067866
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5212121212121212,
            "acc_stderr": 0.03900828913737301,
            "acc_norm": 0.5212121212121212,
            "acc_norm_stderr": 0.03900828913737301
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29008567931456547,
            "mc1_stderr": 0.01588623687420952,
            "mc2": 0.4478974507988722,
            "mc2_stderr": 0.015169839199333743
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4817001180637544,
            "acc_stderr": 0.017178836639177745,
            "acc_norm": 0.6139315230224321,
            "acc_norm_stderr": 0.016738130760321757
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Kaeri-Jenti/LDCC-with-openorca2",
        "model_sha": "e0f7d47f657361c2fffd4a67428b5ab523b84261",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}