{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.33532423208191126,
            "acc_stderr": 0.01379618294778556,
            "acc_norm": 0.3848122866894198,
            "acc_norm_stderr": 0.014218371065251112
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.35480979884485164,
            "acc_stderr": 0.004774778180345192,
            "acc_norm": 0.44911372236606256,
            "acc_norm_stderr": 0.00496387293685794
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.45614035087719296,
            "acc_stderr": 0.03820042586602966,
            "acc_norm": 0.45614035087719296,
            "acc_norm_stderr": 0.03820042586602966
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6019417475728155,
            "acc_stderr": 0.04846748253977238,
            "acc_norm": 0.6019417475728155,
            "acc_norm_stderr": 0.04846748253977238
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.41762452107279696,
            "acc_stderr": 0.017635637326951534,
            "acc_norm": 0.41762452107279696,
            "acc_norm_stderr": 0.017635637326951534
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.34074074074074073,
            "acc_stderr": 0.040943762699967946,
            "acc_norm": 0.34074074074074073,
            "acc_norm_stderr": 0.040943762699967946
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2978723404255319,
            "acc_stderr": 0.029896145682095462,
            "acc_norm": 0.2978723404255319,
            "acc_norm_stderr": 0.029896145682095462
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3614457831325301,
            "acc_stderr": 0.0374005938202932,
            "acc_norm": 0.3614457831325301,
            "acc_norm_stderr": 0.0374005938202932
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4758842443729904,
            "acc_stderr": 0.028365041542564584,
            "acc_norm": 0.4758842443729904,
            "acc_norm_stderr": 0.028365041542564584
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3811659192825112,
            "acc_stderr": 0.032596251184168284,
            "acc_norm": 0.3811659192825112,
            "acc_norm_stderr": 0.032596251184168284
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3511450381679389,
            "acc_stderr": 0.04186445163013751,
            "acc_norm": 0.3511450381679389,
            "acc_norm_stderr": 0.04186445163013751
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.494949494949495,
            "acc_stderr": 0.035621707606254015,
            "acc_norm": 0.494949494949495,
            "acc_norm_stderr": 0.035621707606254015
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4,
            "acc_stderr": 0.04082482904638628,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04082482904638628
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3137254901960784,
            "acc_stderr": 0.04617034827006717,
            "acc_norm": 0.3137254901960784,
            "acc_norm_stderr": 0.04617034827006717
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4957983193277311,
            "acc_stderr": 0.0324773433444811,
            "acc_norm": 0.4957983193277311,
            "acc_norm_stderr": 0.0324773433444811
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4256410256410256,
            "acc_stderr": 0.025069094387296546,
            "acc_norm": 0.4256410256410256,
            "acc_norm_stderr": 0.025069094387296546
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.59,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4537037037037037,
            "acc_stderr": 0.04812917324536821,
            "acc_norm": 0.4537037037037037,
            "acc_norm_stderr": 0.04812917324536821
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.35467980295566504,
            "acc_stderr": 0.03366124489051449,
            "acc_norm": 0.35467980295566504,
            "acc_norm_stderr": 0.03366124489051449
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4290322580645161,
            "acc_stderr": 0.02815603653823321,
            "acc_norm": 0.4290322580645161,
            "acc_norm_stderr": 0.02815603653823321
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6666666666666666,
            "acc_stderr": 0.03088273697413865,
            "acc_norm": 0.6666666666666666,
            "acc_norm_stderr": 0.03088273697413865
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4188679245283019,
            "acc_stderr": 0.03036505082911521,
            "acc_norm": 0.4188679245283019,
            "acc_norm_stderr": 0.03036505082911521
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.42727272727272725,
            "acc_stderr": 0.04738198703545483,
            "acc_norm": 0.42727272727272725,
            "acc_norm_stderr": 0.04738198703545483
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.34814814814814815,
            "acc_stderr": 0.029045600290616258,
            "acc_norm": 0.34814814814814815,
            "acc_norm_stderr": 0.029045600290616258
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.037101857261199946,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.037101857261199946
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5174129353233831,
            "acc_stderr": 0.03533389234739245,
            "acc_norm": 0.5174129353233831,
            "acc_norm_stderr": 0.03533389234739245
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.37572254335260113,
            "acc_stderr": 0.03692820767264867,
            "acc_norm": 0.37572254335260113,
            "acc_norm_stderr": 0.03692820767264867
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3492063492063492,
            "acc_stderr": 0.024552292209342658,
            "acc_norm": 0.3492063492063492,
            "acc_norm_stderr": 0.024552292209342658
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.039420826399272135,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.039420826399272135
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.35,
            "acc_stderr": 0.04793724854411019,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.04793724854411019
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956913,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956913
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.47398843930635837,
            "acc_stderr": 0.026882643434022885,
            "acc_norm": 0.47398843930635837,
            "acc_norm_stderr": 0.026882643434022885
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.44171779141104295,
            "acc_stderr": 0.039015918258361836,
            "acc_norm": 0.44171779141104295,
            "acc_norm_stderr": 0.039015918258361836
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.42592592592592593,
            "acc_stderr": 0.027513747284379424,
            "acc_norm": 0.42592592592592593,
            "acc_norm_stderr": 0.027513747284379424
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252606,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252606
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5129533678756477,
            "acc_stderr": 0.0360722806104775,
            "acc_norm": 0.5129533678756477,
            "acc_norm_stderr": 0.0360722806104775
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.0404933929774814,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.0404933929774814
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.47155963302752296,
            "acc_stderr": 0.02140261569734804,
            "acc_norm": 0.47155963302752296,
            "acc_norm_stderr": 0.02140261569734804
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.36507936507936506,
            "acc_stderr": 0.04306241259127152,
            "acc_norm": 0.36507936507936506,
            "acc_norm_stderr": 0.04306241259127152
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4117647058823529,
            "acc_stderr": 0.028180596328259297,
            "acc_norm": 0.4117647058823529,
            "acc_norm_stderr": 0.028180596328259297
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5867768595041323,
            "acc_stderr": 0.04495087843548408,
            "acc_norm": 0.5867768595041323,
            "acc_norm_stderr": 0.04495087843548408
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.40131578947368424,
            "acc_stderr": 0.03988903703336284,
            "acc_norm": 0.40131578947368424,
            "acc_norm_stderr": 0.03988903703336284
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.32679738562091504,
            "acc_stderr": 0.018975427920507215,
            "acc_norm": 0.32679738562091504,
            "acc_norm_stderr": 0.018975427920507215
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.02812163604063988,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.02812163604063988
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3392857142857143,
            "acc_stderr": 0.04493949068613539,
            "acc_norm": 0.3392857142857143,
            "acc_norm_stderr": 0.04493949068613539
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.41203703703703703,
            "acc_stderr": 0.03356787758160835,
            "acc_norm": 0.41203703703703703,
            "acc_norm_stderr": 0.03356787758160835
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.329608938547486,
            "acc_stderr": 0.015721531075183884,
            "acc_norm": 0.329608938547486,
            "acc_norm_stderr": 0.015721531075183884
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.375,
            "acc_stderr": 0.029408372932278746,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.029408372932278746
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.43673469387755104,
            "acc_stderr": 0.03175195237583322,
            "acc_norm": 0.43673469387755104,
            "acc_norm_stderr": 0.03175195237583322
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4810126582278481,
            "acc_stderr": 0.03252375148090448,
            "acc_norm": 0.4810126582278481,
            "acc_norm_stderr": 0.03252375148090448
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.29791395045632335,
            "acc_stderr": 0.011680717340400059,
            "acc_norm": 0.29791395045632335,
            "acc_norm_stderr": 0.011680717340400059
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.29411764705882354,
            "acc_stderr": 0.03198001660115072,
            "acc_norm": 0.29411764705882354,
            "acc_norm_stderr": 0.03198001660115072
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.30303030303030304,
            "acc_stderr": 0.03588624800091707,
            "acc_norm": 0.30303030303030304,
            "acc_norm_stderr": 0.03588624800091707
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3317013463892289,
            "mc1_stderr": 0.01648214881024147,
            "mc2": 0.5171680571717291,
            "mc2_stderr": 0.01606077987901482
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.39787485242030696,
            "acc_stderr": 0.01682795905473339,
            "acc_norm": 0.4014167650531287,
            "acc_norm_stderr": 0.01685290785872906
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "HuggingFaceH4/zephyr-7b-beta",
        "model_sha": "3bac358730f8806e5c3dc7c7e19eb36e045bf720",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}