{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4598976109215017,
            "acc_stderr": 0.01456431885692485,
            "acc_norm": 0.5170648464163823,
            "acc_norm_stderr": 0.014602878388536591
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.45598486357299345,
            "acc_stderr": 0.004970410081009441,
            "acc_norm": 0.6200955984863573,
            "acc_norm_stderr": 0.004843708550386534
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6549707602339181,
            "acc_stderr": 0.036459813773888065,
            "acc_norm": 0.6549707602339181,
            "acc_norm_stderr": 0.036459813773888065
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6504854368932039,
            "acc_stderr": 0.047211885060971716,
            "acc_norm": 0.6504854368932039,
            "acc_norm_stderr": 0.047211885060971716
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.669220945083014,
            "acc_stderr": 0.016824818462563756,
            "acc_norm": 0.669220945083014,
            "acc_norm_stderr": 0.016824818462563756
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.43703703703703706,
            "acc_stderr": 0.04284958639753398,
            "acc_norm": 0.43703703703703706,
            "acc_norm_stderr": 0.04284958639753398
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252604,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252604
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.502127659574468,
            "acc_stderr": 0.03268572658667493,
            "acc_norm": 0.502127659574468,
            "acc_norm_stderr": 0.03268572658667493
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.5,
            "acc_stderr": 0.03892494720807614,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.03892494720807614
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5980707395498392,
            "acc_stderr": 0.027846476005930473,
            "acc_norm": 0.5980707395498392,
            "acc_norm_stderr": 0.027846476005930473
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5560538116591929,
            "acc_stderr": 0.03334625674242728,
            "acc_norm": 0.5560538116591929,
            "acc_norm_stderr": 0.03334625674242728
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.6641221374045801,
            "acc_stderr": 0.04142313771996665,
            "acc_norm": 0.6641221374045801,
            "acc_norm_stderr": 0.04142313771996665
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.7373737373737373,
            "acc_stderr": 0.03135305009533087,
            "acc_norm": 0.7373737373737373,
            "acc_norm_stderr": 0.03135305009533087
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5379310344827586,
            "acc_stderr": 0.041546596717075474,
            "acc_norm": 0.5379310344827586,
            "acc_norm_stderr": 0.041546596717075474
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.30392156862745096,
            "acc_stderr": 0.045766654032077615,
            "acc_norm": 0.30392156862745096,
            "acc_norm_stderr": 0.045766654032077615
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.6596638655462185,
            "acc_stderr": 0.03077805742293167,
            "acc_norm": 0.6596638655462185,
            "acc_norm_stderr": 0.03077805742293167
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5564102564102564,
            "acc_stderr": 0.025189149894764208,
            "acc_norm": 0.5564102564102564,
            "acc_norm_stderr": 0.025189149894764208
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.58,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6759259259259259,
            "acc_stderr": 0.04524596007030048,
            "acc_norm": 0.6759259259259259,
            "acc_norm_stderr": 0.04524596007030048
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.03481904844438804,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.03481904844438804
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.603225806451613,
            "acc_stderr": 0.027831231605767944,
            "acc_norm": 0.603225806451613,
            "acc_norm_stderr": 0.027831231605767944
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.811965811965812,
            "acc_stderr": 0.025598193686652247,
            "acc_norm": 0.811965811965812,
            "acc_norm_stderr": 0.025598193686652247
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5283018867924528,
            "acc_stderr": 0.030723535249006107,
            "acc_norm": 0.5283018867924528,
            "acc_norm_stderr": 0.030723535249006107
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.6090909090909091,
            "acc_stderr": 0.04673752333670239,
            "acc_norm": 0.6090909090909091,
            "acc_norm_stderr": 0.04673752333670239
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.02944316932303154,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.02944316932303154
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3841059602649007,
            "acc_stderr": 0.03971301814719198,
            "acc_norm": 0.3841059602649007,
            "acc_norm_stderr": 0.03971301814719198
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.7313432835820896,
            "acc_stderr": 0.031343283582089536,
            "acc_norm": 0.7313432835820896,
            "acc_norm_stderr": 0.031343283582089536
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.49710982658959535,
            "acc_stderr": 0.038124005659748335,
            "acc_norm": 0.49710982658959535,
            "acc_norm_stderr": 0.038124005659748335
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.43386243386243384,
            "acc_stderr": 0.025525034382474884,
            "acc_norm": 0.43386243386243384,
            "acc_norm_stderr": 0.025525034382474884
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5277777777777778,
            "acc_stderr": 0.04174752578923185,
            "acc_norm": 0.5277777777777778,
            "acc_norm_stderr": 0.04174752578923185
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.73,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.73,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.6069364161849711,
            "acc_stderr": 0.02629622791561367,
            "acc_norm": 0.6069364161849711,
            "acc_norm_stderr": 0.02629622791561367
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4785276073619632,
            "acc_stderr": 0.03924746876751129,
            "acc_norm": 0.4785276073619632,
            "acc_norm_stderr": 0.03924746876751129
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.6234567901234568,
            "acc_stderr": 0.026959344518747784,
            "acc_norm": 0.6234567901234568,
            "acc_norm_stderr": 0.026959344518747784
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.7098445595854922,
            "acc_stderr": 0.03275264467791515,
            "acc_norm": 0.7098445595854922,
            "acc_norm_stderr": 0.03275264467791515
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.39473684210526316,
            "acc_stderr": 0.04598188057816542,
            "acc_norm": 0.39473684210526316,
            "acc_norm_stderr": 0.04598188057816542
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.7229357798165138,
            "acc_stderr": 0.019188482590169538,
            "acc_norm": 0.7229357798165138,
            "acc_norm_stderr": 0.019188482590169538
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.42063492063492064,
            "acc_stderr": 0.04415438226743743,
            "acc_norm": 0.42063492063492064,
            "acc_norm_stderr": 0.04415438226743743
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5751633986928104,
            "acc_stderr": 0.028304576673141114,
            "acc_norm": 0.5751633986928104,
            "acc_norm_stderr": 0.028304576673141114
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.6,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7272727272727273,
            "acc_stderr": 0.04065578140908705,
            "acc_norm": 0.7272727272727273,
            "acc_norm_stderr": 0.04065578140908705
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5921052631578947,
            "acc_stderr": 0.039993097127774734,
            "acc_norm": 0.5921052631578947,
            "acc_norm_stderr": 0.039993097127774734
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.5065359477124183,
            "acc_stderr": 0.020226106567657807,
            "acc_norm": 0.5065359477124183,
            "acc_norm_stderr": 0.020226106567657807
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.42907801418439717,
            "acc_stderr": 0.029525914302558555,
            "acc_norm": 0.42907801418439717,
            "acc_norm_stderr": 0.029525914302558555
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.38392857142857145,
            "acc_stderr": 0.04616143075028546,
            "acc_norm": 0.38392857142857145,
            "acc_norm_stderr": 0.04616143075028546
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.5277777777777778,
            "acc_stderr": 0.0340470532865388,
            "acc_norm": 0.5277777777777778,
            "acc_norm_stderr": 0.0340470532865388
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.18435754189944134,
            "acc_stderr": 0.01296915281188346,
            "acc_norm": 0.18435754189944134,
            "acc_norm_stderr": 0.01296915281188346
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.71,
            "acc_stderr": 0.04560480215720684,
            "acc_norm": 0.71,
            "acc_norm_stderr": 0.04560480215720684
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5147058823529411,
            "acc_stderr": 0.03035969707904612,
            "acc_norm": 0.5147058823529411,
            "acc_norm_stderr": 0.03035969707904612
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.6530612244897959,
            "acc_stderr": 0.0304725260267265,
            "acc_norm": 0.6530612244897959,
            "acc_norm_stderr": 0.0304725260267265
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.7426160337552743,
            "acc_stderr": 0.028458820991460305,
            "acc_norm": 0.7426160337552743,
            "acc_norm_stderr": 0.028458820991460305
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.4002607561929596,
            "acc_stderr": 0.012513582529136208,
            "acc_norm": 0.4002607561929596,
            "acc_norm_stderr": 0.012513582529136208
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6323529411764706,
            "acc_stderr": 0.03384132045674118,
            "acc_norm": 0.6323529411764706,
            "acc_norm_stderr": 0.03384132045674118
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6545454545454545,
            "acc_stderr": 0.037131580674819135,
            "acc_norm": 0.6545454545454545,
            "acc_norm_stderr": 0.037131580674819135
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2974296205630355,
            "mc1_stderr": 0.016002651487361002,
            "mc2": 0.45096541545871954,
            "mc2_stderr": 0.015351270163058638
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5076741440377804,
            "acc_stderr": 0.017188329219654273,
            "acc_norm": 0.5938606847697757,
            "acc_norm_stderr": 0.0168847495031914
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Megastudy/M-SOLAR-10.7B-v1.2",
        "model_sha": "21d08bb6676a9ad729f4e048c3eaae9a4897149a",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}