{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3430034129692833, "acc_stderr": 0.013872423223718173, "acc_norm": 0.39590443686006827, "acc_norm_stderr": 0.01429122839353659 }, "harness|ko_hellaswag|10": { "acc": 0.4023102967536347, "acc_stderr": 0.00489361701497531, "acc_norm": 0.5319657438757219, "acc_norm_stderr": 0.0049795737655758615 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|ko_mmlu_management|5": { "acc": 0.27184466019417475, "acc_stderr": 0.044052680241409216, "acc_norm": 0.27184466019417475, "acc_norm_stderr": 0.044052680241409216 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.37292464878671777, "acc_stderr": 0.01729286826945392, "acc_norm": 0.37292464878671777, "acc_norm_stderr": 0.01729286826945392 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.31851851851851853, "acc_stderr": 0.0402477840197711, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.0402477840197711 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.35319148936170214, "acc_stderr": 0.031245325202761926, "acc_norm": 0.35319148936170214, "acc_norm_stderr": 0.031245325202761926 }, "harness|ko_mmlu_virology|5": { "acc": 0.25903614457831325, "acc_stderr": 0.03410646614071856, "acc_norm": 0.25903614457831325, "acc_norm_stderr": 0.03410646614071856 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3504823151125402, "acc_stderr": 0.027098652621301747, "acc_norm": 0.3504823151125402, "acc_norm_stderr": 0.027098652621301747 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4080717488789238, "acc_stderr": 0.03298574607842822, "acc_norm": 0.4080717488789238, "acc_norm_stderr": 0.03298574607842822 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.366412213740458, "acc_stderr": 0.04225875451969638, "acc_norm": 0.366412213740458, "acc_norm_stderr": 0.04225875451969638 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.34, "acc_stderr": 0.047609522856952344, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952344 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.20707070707070707, "acc_stderr": 0.028869778460267042, "acc_norm": 0.20707070707070707, "acc_norm_stderr": 0.028869778460267042 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2689655172413793, "acc_stderr": 0.03695183311650232, "acc_norm": 0.2689655172413793, "acc_norm_stderr": 0.03695183311650232 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03708284662416545, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03708284662416545 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.2689075630252101, "acc_stderr": 0.02880139219363128, "acc_norm": 0.2689075630252101, "acc_norm_stderr": 0.02880139219363128 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2564102564102564, "acc_stderr": 0.022139081103971527, "acc_norm": 0.2564102564102564, "acc_norm_stderr": 0.022139081103971527 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04557239513497751, "acc_norm": 0.3333333333333333, 
"acc_norm_stderr": 0.04557239513497751 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.270935960591133, "acc_stderr": 0.03127090713297698, "acc_norm": 0.270935960591133, "acc_norm_stderr": 0.03127090713297698 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3064516129032258, "acc_stderr": 0.02622648565255389, "acc_norm": 0.3064516129032258, "acc_norm_stderr": 0.02622648565255389 }, "harness|ko_mmlu_marketing|5": { "acc": 0.4017094017094017, "acc_stderr": 0.03211693751051622, "acc_norm": 0.4017094017094017, "acc_norm_stderr": 0.03211693751051622 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3283018867924528, "acc_stderr": 0.02890159361241178, "acc_norm": 0.3283018867924528, "acc_norm_stderr": 0.02890159361241178 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.37272727272727274, "acc_stderr": 0.04631381319425463, "acc_norm": 0.37272727272727274, "acc_norm_stderr": 0.04631381319425463 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.02696242432507383, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.02696242432507383 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.24503311258278146, "acc_stderr": 0.035118075718047245, "acc_norm": 0.24503311258278146, "acc_norm_stderr": 0.035118075718047245 }, "harness|ko_mmlu_sociology|5": { "acc": 0.3482587064676617, "acc_stderr": 0.03368787466115459, "acc_norm": 0.3482587064676617, "acc_norm_stderr": 0.03368787466115459 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.23699421965317918, "acc_stderr": 0.03242414757483099, "acc_norm": 0.23699421965317918, "acc_norm_stderr": 0.03242414757483099 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.24338624338624337, "acc_stderr": 0.022101128787415415, "acc_norm": 0.24338624338624337, "acc_norm_stderr": 0.022101128787415415 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.24305555555555555, "acc_stderr": 0.03586879280080343, "acc_norm": 0.24305555555555555, "acc_norm_stderr": 0.03586879280080343 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536955 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.3265895953757225, "acc_stderr": 0.025248264774242832, "acc_norm": 0.3265895953757225, "acc_norm_stderr": 0.025248264774242832 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.294478527607362, "acc_stderr": 0.03581165790474082, "acc_norm": 0.294478527607362, "acc_norm_stderr": 0.03581165790474082 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.32098765432098764, "acc_stderr": 0.02597656601086274, "acc_norm": 0.32098765432098764, "acc_norm_stderr": 0.02597656601086274 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165065 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.2694300518134715, "acc_stderr": 0.032018671228777947, "acc_norm": 0.2694300518134715, "acc_norm_stderr": 0.032018671228777947 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2982456140350877, "acc_stderr": 0.04303684033537315, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.04303684033537315 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.28623853211009176, "acc_stderr": 0.019379436628919975, "acc_norm": 0.28623853211009176, "acc_norm_stderr": 0.019379436628919975 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.1984126984126984, "acc_stderr": 0.03567016675276862, "acc_norm": 0.1984126984126984, "acc_norm_stderr": 0.03567016675276862 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3431372549019608, "acc_stderr": 0.027184498909941616, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.027184498909941616 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847415, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415 }, "harness|ko_mmlu_international_law|5": { "acc": 0.39669421487603307, "acc_stderr": 0.044658697805310094, "acc_norm": 0.39669421487603307, "acc_norm_stderr": 0.044658697805310094 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.23026315789473684, "acc_stderr": 0.034260594244031654, "acc_norm": 0.23026315789473684, "acc_norm_stderr": 0.034260594244031654 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.28431372549019607, "acc_stderr": 0.018249024411207664, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.018249024411207664 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2765957446808511, "acc_stderr": 0.026684564340461004, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.026684564340461004 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.29464285714285715, "acc_stderr": 0.043270409325787296, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.043270409325787296 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.029886910547626964, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.029886910547626964 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.21691176470588236, "acc_stderr": 0.025035845227711254, "acc_norm": 0.21691176470588236, "acc_norm_stderr": 0.025035845227711254 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.2857142857142857, "acc_stderr": 0.0289205832206756, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.0289205832206756 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.3755274261603376, "acc_stderr": 0.03152256243091156, "acc_norm": 0.3755274261603376, "acc_norm_stderr": 0.03152256243091156 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2816166883963494, "acc_stderr": 0.011487783272786694, "acc_norm": 0.2816166883963494, "acc_norm_stderr": 0.011487783272786694 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.030587591351604246, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604246 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.296969696969697, "acc_stderr": 0.03567969772268049, "acc_norm": 0.296969696969697, "acc_norm_stderr": 0.03567969772268049 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2582619339045288, "mc1_stderr": 0.015321821688476189, "mc2": 0.4026846131079194, "mc2_stderr": 0.014939937441482552 }, "harness|ko_commongen_v2|2": { "acc": 0.31641086186540734, "acc_stderr": 0.015989617951065477, "acc_norm": 0.4639905548996458, "acc_norm_stderr": 0.017145715365486654 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "devhyun88/kullama2-7b-platypus-kogpt4", "model_sha": "033fb6e8db347530e49449d888d780b777e48715", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }