{
  "results": {
    "harness|ko_arc_challenge|25": {
      "acc": 0.3651877133105802,
      "acc_stderr": 0.0140702655192688,
      "acc_norm": 0.4206484641638225,
      "acc_norm_stderr": 0.014426211252508397
    },
    "harness|ko_hellaswag|10": {
      "acc": 0.40509858593905596,
      "acc_stderr": 0.004899078300184257,
      "acc_norm": 0.5417247560246963,
      "acc_norm_stderr": 0.004972377085916328
    },
    "harness|ko_mmlu_world_religions|5": {
      "acc": 0.49707602339181284,
      "acc_stderr": 0.03834759370936839,
      "acc_norm": 0.49707602339181284,
      "acc_norm_stderr": 0.03834759370936839
    },
    "harness|ko_mmlu_management|5": {
      "acc": 0.5825242718446602,
      "acc_stderr": 0.048828405482122375,
      "acc_norm": 0.5825242718446602,
      "acc_norm_stderr": 0.048828405482122375
    },
    "harness|ko_mmlu_miscellaneous|5": {
      "acc": 0.545338441890166,
      "acc_stderr": 0.017806304585052602,
      "acc_norm": 0.545338441890166,
      "acc_norm_stderr": 0.017806304585052602
    },
    "harness|ko_mmlu_anatomy|5": {
      "acc": 0.4740740740740741,
      "acc_stderr": 0.04313531696750574,
      "acc_norm": 0.4740740740740741,
      "acc_norm_stderr": 0.04313531696750574
    },
    "harness|ko_mmlu_abstract_algebra|5": {
      "acc": 0.36,
      "acc_stderr": 0.04824181513244218,
      "acc_norm": 0.36,
      "acc_norm_stderr": 0.04824181513244218
    },
    "harness|ko_mmlu_conceptual_physics|5": {
      "acc": 0.44680851063829785,
      "acc_stderr": 0.0325005368436584,
      "acc_norm": 0.44680851063829785,
      "acc_norm_stderr": 0.0325005368436584
    },
    "harness|ko_mmlu_virology|5": {
      "acc": 0.45180722891566266,
      "acc_stderr": 0.03874371556587953,
      "acc_norm": 0.45180722891566266,
      "acc_norm_stderr": 0.03874371556587953
    },
    "harness|ko_mmlu_philosophy|5": {
      "acc": 0.5594855305466238,
      "acc_stderr": 0.02819640057419743,
      "acc_norm": 0.5594855305466238,
      "acc_norm_stderr": 0.02819640057419743
    },
    "harness|ko_mmlu_human_aging|5": {
      "acc": 0.4618834080717489,
      "acc_stderr": 0.033460150119732274,
      "acc_norm": 0.4618834080717489,
      "acc_norm_stderr": 0.033460150119732274
    },
    "harness|ko_mmlu_human_sexuality|5": {
      "acc": 0.549618320610687,
      "acc_stderr": 0.04363643698524779,
      "acc_norm": 0.549618320610687,
      "acc_norm_stderr": 0.04363643698524779
    },
    "harness|ko_mmlu_medical_genetics|5": {
      "acc": 0.42,
      "acc_stderr": 0.049604496374885836,
      "acc_norm": 0.42,
      "acc_norm_stderr": 0.049604496374885836
    },
    "harness|ko_mmlu_high_school_geography|5": {
      "acc": 0.5808080808080808,
      "acc_stderr": 0.035155207286704175,
      "acc_norm": 0.5808080808080808,
      "acc_norm_stderr": 0.035155207286704175
    },
    "harness|ko_mmlu_electrical_engineering|5": {
      "acc": 0.496551724137931,
      "acc_stderr": 0.04166567577101579,
      "acc_norm": 0.496551724137931,
      "acc_norm_stderr": 0.04166567577101579
    },
    "harness|ko_mmlu_college_physics|5": {
      "acc": 0.28431372549019607,
      "acc_stderr": 0.04488482852329017,
      "acc_norm": 0.28431372549019607,
      "acc_norm_stderr": 0.04488482852329017
    },
    "harness|ko_mmlu_high_school_microeconomics|5": {
      "acc": 0.49159663865546216,
      "acc_stderr": 0.03247390276569669,
      "acc_norm": 0.49159663865546216,
      "acc_norm_stderr": 0.03247390276569669
    },
    "harness|ko_mmlu_high_school_macroeconomics|5": {
      "acc": 0.45384615384615384,
      "acc_stderr": 0.025242770987126174,
      "acc_norm": 0.45384615384615384,
      "acc_norm_stderr": 0.025242770987126174
    },
    "harness|ko_mmlu_computer_security|5": {
      "acc": 0.6,
      "acc_stderr": 0.049236596391733084,
      "acc_norm": 0.6,
      "acc_norm_stderr": 0.049236596391733084
    },
    "harness|ko_mmlu_global_facts|5": {
      "acc": 0.29,
      "acc_stderr": 0.045604802157206845,
      "acc_norm": 0.29,
      "acc_norm_stderr": 0.045604802157206845
    },
    "harness|ko_mmlu_jurisprudence|5": {
      "acc": 0.5925925925925926,
      "acc_stderr": 0.047500773411999854,
      "acc_norm": 0.5925925925925926,
      "acc_norm_stderr": 0.047500773411999854
    },
    "harness|ko_mmlu_high_school_chemistry|5": {
      "acc": 0.3891625615763547,
      "acc_stderr": 0.034304624161038716,
      "acc_norm": 0.3891625615763547,
      "acc_norm_stderr": 0.034304624161038716
    },
    "harness|ko_mmlu_high_school_biology|5": {
      "acc": 0.4838709677419355,
      "acc_stderr": 0.02842920317672455,
      "acc_norm": 0.4838709677419355,
      "acc_norm_stderr": 0.02842920317672455
    },
    "harness|ko_mmlu_marketing|5": {
      "acc": 0.7008547008547008,
      "acc_stderr": 0.029996951858349483,
      "acc_norm": 0.7008547008547008,
      "acc_norm_stderr": 0.029996951858349483
    },
    "harness|ko_mmlu_clinical_knowledge|5": {
      "acc": 0.47924528301886793,
      "acc_stderr": 0.030746349975723463,
      "acc_norm": 0.47924528301886793,
      "acc_norm_stderr": 0.030746349975723463
    },
    "harness|ko_mmlu_public_relations|5": {
      "acc": 0.5363636363636364,
      "acc_stderr": 0.04776449162396197,
      "acc_norm": 0.5363636363636364,
      "acc_norm_stderr": 0.04776449162396197
    },
    "harness|ko_mmlu_high_school_mathematics|5": {
      "acc": 0.31851851851851853,
      "acc_stderr": 0.028406533090608463,
      "acc_norm": 0.31851851851851853,
      "acc_norm_stderr": 0.028406533090608463
    },
    "harness|ko_mmlu_high_school_physics|5": {
      "acc": 0.3576158940397351,
      "acc_stderr": 0.03913453431177258,
      "acc_norm": 0.3576158940397351,
      "acc_norm_stderr": 0.03913453431177258
    },
    "harness|ko_mmlu_sociology|5": {
      "acc": 0.6268656716417911,
      "acc_stderr": 0.034198326081760065,
      "acc_norm": 0.6268656716417911,
      "acc_norm_stderr": 0.034198326081760065
    },
    "harness|ko_mmlu_college_medicine|5": {
      "acc": 0.36416184971098264,
      "acc_stderr": 0.03669072477416907,
      "acc_norm": 0.36416184971098264,
      "acc_norm_stderr": 0.03669072477416907
    },
    "harness|ko_mmlu_elementary_mathematics|5": {
      "acc": 0.30952380952380953,
      "acc_stderr": 0.023809523809523864,
      "acc_norm": 0.30952380952380953,
      "acc_norm_stderr": 0.023809523809523864
    },
    "harness|ko_mmlu_college_biology|5": {
      "acc": 0.4166666666666667,
      "acc_stderr": 0.041227287076512825,
      "acc_norm": 0.4166666666666667,
      "acc_norm_stderr": 0.041227287076512825
    },
    "harness|ko_mmlu_college_chemistry|5": {
      "acc": 0.33,
      "acc_stderr": 0.04725815626252606,
      "acc_norm": 0.33,
      "acc_norm_stderr": 0.04725815626252606
    },
    "harness|ko_mmlu_us_foreign_policy|5": {
      "acc": 0.56,
      "acc_stderr": 0.049888765156985884,
      "acc_norm": 0.56,
      "acc_norm_stderr": 0.049888765156985884
    },
    "harness|ko_mmlu_moral_disputes|5": {
      "acc": 0.5086705202312138,
      "acc_stderr": 0.026915047355369804,
      "acc_norm": 0.5086705202312138,
      "acc_norm_stderr": 0.026915047355369804
    },
    "harness|ko_mmlu_logical_fallacies|5": {
      "acc": 0.4662576687116564,
      "acc_stderr": 0.039194155450484096,
      "acc_norm": 0.4662576687116564,
      "acc_norm_stderr": 0.039194155450484096
    },
    "harness|ko_mmlu_prehistory|5": {
      "acc": 0.49074074074074076,
      "acc_stderr": 0.027815973433878014,
      "acc_norm": 0.49074074074074076,
      "acc_norm_stderr": 0.027815973433878014
    },
    "harness|ko_mmlu_college_mathematics|5": {
      "acc": 0.27,
      "acc_stderr": 0.044619604333847394,
      "acc_norm": 0.27,
      "acc_norm_stderr": 0.044619604333847394
    },
    "harness|ko_mmlu_high_school_government_and_politics|5": {
      "acc": 0.5751295336787565,
      "acc_stderr": 0.035674713352125395,
      "acc_norm": 0.5751295336787565,
      "acc_norm_stderr": 0.035674713352125395
    },
    "harness|ko_mmlu_econometrics|5": {
      "acc": 0.32456140350877194,
      "acc_stderr": 0.044045561573747685,
      "acc_norm": 0.32456140350877194,
      "acc_norm_stderr": 0.044045561573747685
    },
    "harness|ko_mmlu_high_school_psychology|5": {
      "acc": 0.6091743119266055,
      "acc_stderr": 0.020920058346111062,
      "acc_norm": 0.6091743119266055,
      "acc_norm_stderr": 0.020920058346111062
    },
    "harness|ko_mmlu_formal_logic|5": {
      "acc": 0.30158730158730157,
      "acc_stderr": 0.041049472699033945,
      "acc_norm": 0.30158730158730157,
      "acc_norm_stderr": 0.041049472699033945
    },
    "harness|ko_mmlu_nutrition|5": {
      "acc": 0.4738562091503268,
      "acc_stderr": 0.028590752958852394,
      "acc_norm": 0.4738562091503268,
      "acc_norm_stderr": 0.028590752958852394
    },
    "harness|ko_mmlu_business_ethics|5": {
      "acc": 0.54,
      "acc_stderr": 0.05009082659620333,
      "acc_norm": 0.54,
      "acc_norm_stderr": 0.05009082659620333
    },
    "harness|ko_mmlu_international_law|5": {
      "acc": 0.6446280991735537,
      "acc_stderr": 0.0436923632657398,
      "acc_norm": 0.6446280991735537,
      "acc_norm_stderr": 0.0436923632657398
    },
    "harness|ko_mmlu_astronomy|5": {
      "acc": 0.506578947368421,
      "acc_stderr": 0.040685900502249704,
      "acc_norm": 0.506578947368421,
      "acc_norm_stderr": 0.040685900502249704
    },
    "harness|ko_mmlu_professional_psychology|5": {
      "acc": 0.41830065359477125,
      "acc_stderr": 0.019955975145835542,
      "acc_norm": 0.41830065359477125,
      "acc_norm_stderr": 0.019955975145835542
    },
    "harness|ko_mmlu_professional_accounting|5": {
      "acc": 0.3120567375886525,
      "acc_stderr": 0.027640120545169938,
      "acc_norm": 0.3120567375886525,
      "acc_norm_stderr": 0.027640120545169938
    },
    "harness|ko_mmlu_machine_learning|5": {
      "acc": 0.32142857142857145,
      "acc_stderr": 0.04432804055291519,
      "acc_norm": 0.32142857142857145,
      "acc_norm_stderr": 0.04432804055291519
    },
    "harness|ko_mmlu_high_school_statistics|5": {
      "acc": 0.39351851851851855,
      "acc_stderr": 0.03331747876370312,
      "acc_norm": 0.39351851851851855,
      "acc_norm_stderr": 0.03331747876370312
    },
    "harness|ko_mmlu_moral_scenarios|5": {
      "acc": 0.2871508379888268,
      "acc_stderr": 0.015131608849963757,
      "acc_norm": 0.2871508379888268,
      "acc_norm_stderr": 0.015131608849963757
    },
    "harness|ko_mmlu_college_computer_science|5": {
      "acc": 0.39,
      "acc_stderr": 0.04902071300001974,
      "acc_norm": 0.39,
      "acc_norm_stderr": 0.04902071300001974
    },
    "harness|ko_mmlu_high_school_computer_science|5": {
      "acc": 0.56,
      "acc_stderr": 0.04988876515698589,
      "acc_norm": 0.56,
      "acc_norm_stderr": 0.04988876515698589
    },
    "harness|ko_mmlu_professional_medicine|5": {
      "acc": 0.41911764705882354,
      "acc_stderr": 0.029972807170464626,
      "acc_norm": 0.41911764705882354,
      "acc_norm_stderr": 0.029972807170464626
    },
    "harness|ko_mmlu_security_studies|5": {
      "acc": 0.4448979591836735,
      "acc_stderr": 0.031814251181977865,
      "acc_norm": 0.4448979591836735,
      "acc_norm_stderr": 0.031814251181977865
    },
    "harness|ko_mmlu_high_school_world_history|5": {
      "acc": 0.6497890295358649,
      "acc_stderr": 0.03105239193758435,
      "acc_norm": 0.6497890295358649,
      "acc_norm_stderr": 0.03105239193758435
    },
    "harness|ko_mmlu_professional_law|5": {
      "acc": 0.3363754889178618,
      "acc_stderr": 0.012067083079452225,
      "acc_norm": 0.3363754889178618,
      "acc_norm_stderr": 0.012067083079452225
    },
    "harness|ko_mmlu_high_school_us_history|5": {
      "acc": 0.5147058823529411,
      "acc_stderr": 0.03507793834791324,
      "acc_norm": 0.5147058823529411,
      "acc_norm_stderr": 0.03507793834791324
    },
    "harness|ko_mmlu_high_school_european_history|5": {
      "acc": 0.6,
      "acc_stderr": 0.03825460278380025,
      "acc_norm": 0.6,
      "acc_norm_stderr": 0.03825460278380025
    },
    "harness|ko_truthfulqa_mc|0": {
      "mc1": 0.2864137086903305,
      "mc1_stderr": 0.01582614243950234,
      "mc2": 0.4285492447923733,
      "mc2_stderr": 0.015103565647608173
    },
    "harness|ko_commongen_v2|2": {
      "acc": 0.551357733175915,
      "acc_stderr": 0.017099430514725778,
      "acc_norm": 0.6162927981109799,
      "acc_norm_stderr": 0.016718924637231826
    }
  },
  "versions": {
    "all": 0,
    "harness|ko_arc_challenge|25": 0,
    "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1,
    "harness|ko_mmlu_management|5": 1,
    "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1,
    "harness|ko_mmlu_abstract_algebra|5": 1,
    "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1,
    "harness|ko_mmlu_philosophy|5": 1,
    "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1,
    "harness|ko_mmlu_medical_genetics|5": 1,
    "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1,
    "harness|ko_mmlu_college_physics|5": 1,
    "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
    "harness|ko_mmlu_computer_security|5": 1,
    "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1,
    "harness|ko_mmlu_high_school_chemistry|5": 1,
    "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1,
    "harness|ko_mmlu_clinical_knowledge|5": 1,
    "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1,
    "harness|ko_mmlu_high_school_physics|5": 1,
    "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1,
    "harness|ko_mmlu_elementary_mathematics|5": 1,
    "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1,
    "harness|ko_mmlu_us_foreign_policy|5": 1,
    "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1,
    "harness|ko_mmlu_prehistory|5": 1,
    "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
    "harness|ko_mmlu_econometrics|5": 1,
    "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1,
    "harness|ko_mmlu_nutrition|5": 1,
    "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1,
    "harness|ko_mmlu_astronomy|5": 1,
    "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1,
    "harness|ko_mmlu_machine_learning|5": 1,
    "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1,
    "harness|ko_mmlu_college_computer_science|5": 1,
    "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1,
    "harness|ko_mmlu_security_studies|5": 1,
    "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1,
    "harness|ko_mmlu_high_school_us_history|5": 1,
    "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0,
    "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "Minirecord/minyi_6b",
    "model_sha": "f8137f2ed10ff1496e75729ed15fad480073a7e4",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}