{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2687713310580205, "acc_stderr": 0.012955065963710682, "acc_norm": 0.3225255972696246, "acc_norm_stderr": 0.013659980894277366 }, "harness|ko_hellaswag|10": { "acc": 0.3323043218482374, "acc_stderr": 0.004700767741735566, "acc_norm": 0.4056960764787891, "acc_norm_stderr": 0.004900227226433385 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4327485380116959, "acc_stderr": 0.037999786443706066, "acc_norm": 0.4327485380116959, "acc_norm_stderr": 0.037999786443706066 }, "harness|ko_mmlu_management|5": { "acc": 0.2912621359223301, "acc_stderr": 0.044986763205729224, "acc_norm": 0.2912621359223301, "acc_norm_stderr": 0.044986763205729224 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.3371647509578544, "acc_stderr": 0.01690520742080355, "acc_norm": 0.3371647509578544, "acc_norm_stderr": 0.01690520742080355 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.2962962962962963, "acc_stderr": 0.03944624162501117, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.03944624162501117 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3659574468085106, "acc_stderr": 0.031489558297455304, "acc_norm": 0.3659574468085106, "acc_norm_stderr": 0.031489558297455304 }, "harness|ko_mmlu_virology|5": { "acc": 0.3253012048192771, "acc_stderr": 0.03647168523683227, "acc_norm": 0.3253012048192771, "acc_norm_stderr": 0.03647168523683227 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.28938906752411575, "acc_stderr": 0.025755865922632945, "acc_norm": 0.28938906752411575, "acc_norm_stderr": 0.025755865922632945 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.3542600896860987, "acc_stderr": 0.03210062154134987, "acc_norm": 0.3542600896860987, "acc_norm_stderr": 0.03210062154134987 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.29770992366412213, "acc_stderr": 0.04010358942462203, "acc_norm": 0.29770992366412213, "acc_norm_stderr": 0.04010358942462203 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.04605661864718381, "acc_norm": 0.3, "acc_norm_stderr": 0.04605661864718381 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.29292929292929293, "acc_stderr": 0.032424979581788166, "acc_norm": 0.29292929292929293, "acc_norm_stderr": 0.032424979581788166 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3310344827586207, "acc_stderr": 0.039215453124671215, "acc_norm": 0.3310344827586207, "acc_norm_stderr": 0.039215453124671215 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03708284662416544, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03708284662416544 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.27310924369747897, "acc_stderr": 0.028942004040998167, "acc_norm": 0.27310924369747897, "acc_norm_stderr": 0.028942004040998167 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.28717948717948716, "acc_stderr": 0.022939925418530616, "acc_norm": 0.28717948717948716, "acc_norm_stderr": 0.022939925418530616 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.32407407407407407, "acc_stderr": 0.04524596007030049, "acc_norm": 
0.32407407407407407, "acc_norm_stderr": 0.04524596007030049 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.28078817733990147, "acc_stderr": 0.0316185633535861, "acc_norm": 0.28078817733990147, "acc_norm_stderr": 0.0316185633535861 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.25483870967741934, "acc_stderr": 0.024790118459332208, "acc_norm": 0.25483870967741934, "acc_norm_stderr": 0.024790118459332208 }, "harness|ko_mmlu_marketing|5": { "acc": 0.452991452991453, "acc_stderr": 0.03261099873098619, "acc_norm": 0.452991452991453, "acc_norm_stderr": 0.03261099873098619 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2339622641509434, "acc_stderr": 0.026055296901152922, "acc_norm": 0.2339622641509434, "acc_norm_stderr": 0.026055296901152922 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.4090909090909091, "acc_stderr": 0.04709306978661896, "acc_norm": 0.4090909090909091, "acc_norm_stderr": 0.04709306978661896 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3296296296296296, "acc_stderr": 0.028661201116524575, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.028661201116524575 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.26490066225165565, "acc_stderr": 0.036030385453603854, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.036030385453603854 }, "harness|ko_mmlu_sociology|5": { "acc": 0.39303482587064675, "acc_stderr": 0.0345368246603156, "acc_norm": 0.39303482587064675, "acc_norm_stderr": 0.0345368246603156 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2947976878612717, "acc_stderr": 0.03476599607516478, "acc_norm": 0.2947976878612717, "acc_norm_stderr": 0.03476599607516478 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2698412698412698, "acc_stderr": 0.022860838309232072, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.022860838309232072 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2916666666666667, "acc_stderr": 0.03800968060554858, "acc_norm": 0.2916666666666667, "acc_norm_stderr": 0.03800968060554858 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.3092485549132948, "acc_stderr": 0.02488314057007176, "acc_norm": 0.3092485549132948, "acc_norm_stderr": 0.02488314057007176 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.25766871165644173, "acc_stderr": 0.03436150827846917, "acc_norm": 0.25766871165644173, "acc_norm_stderr": 0.03436150827846917 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.32407407407407407, "acc_stderr": 0.026041766202717163, "acc_norm": 0.32407407407407407, "acc_norm_stderr": 0.026041766202717163 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.23834196891191708, "acc_stderr": 0.030748905363909895, "acc_norm": 0.23834196891191708, "acc_norm_stderr": 0.030748905363909895 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.0409698513984367, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.0409698513984367 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.26788990825688075, "acc_stderr": 0.018987462257978652, "acc_norm": 0.26788990825688075, "acc_norm_stderr": 0.018987462257978652 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.30718954248366015, "acc_stderr": 0.026415601914389002, "acc_norm": 0.30718954248366015, "acc_norm_stderr": 0.026415601914389002 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_international_law|5": { "acc": 0.36363636363636365, "acc_stderr": 0.04391326286724071, "acc_norm": 0.36363636363636365, "acc_norm_stderr": 0.04391326286724071 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.2565789473684211, "acc_stderr": 0.03554180368025689, "acc_norm": 0.2565789473684211, "acc_norm_stderr": 0.03554180368025689 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.29901960784313725, "acc_stderr": 0.01852175621542303, "acc_norm": 0.29901960784313725, "acc_norm_stderr": 0.01852175621542303 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.30851063829787234, "acc_stderr": 0.027553366165101373, "acc_norm": 0.30851063829787234, "acc_norm_stderr": 0.027553366165101373 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.39285714285714285, "acc_stderr": 0.04635550135609976, "acc_norm": 0.39285714285714285, "acc_norm_stderr": 0.04635550135609976 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.18981481481481483, "acc_stderr": 0.026744714834691943, "acc_norm": 0.18981481481481483, "acc_norm_stderr": 0.026744714834691943 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.27039106145251396, "acc_stderr": 0.014854993938010083, "acc_norm": 0.27039106145251396, "acc_norm_stderr": 0.014854993938010083 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.2426470588235294, "acc_stderr": 0.026040662474201278, "acc_norm": 0.2426470588235294, "acc_norm_stderr": 0.026040662474201278 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.24897959183673468, "acc_stderr": 0.027682979522960244, "acc_norm": 0.24897959183673468, "acc_norm_stderr": 0.027682979522960244 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.35443037974683544, "acc_stderr": 0.0311373042971858, "acc_norm": 0.35443037974683544, "acc_norm_stderr": 0.0311373042971858 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.28292046936114734, "acc_stderr": 0.011503891323188976, "acc_norm": 0.28292046936114734, "acc_norm_stderr": 0.011503891323188976 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.30392156862745096, "acc_stderr": 0.032282103870378935, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.032282103870378935 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2606060606060606, "acc_stderr": 0.03427743175816524, "acc_norm": 0.2606060606060606, "acc_norm_stderr": 0.03427743175816524 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2631578947368421, "mc1_stderr": 0.01541524174023704, "mc2": 0.4418547715713716, "mc2_stderr": 0.01568020575059561 }, "harness|ko_commongen_v2|2": { "acc": 0.3010625737898465, "acc_stderr": 0.015771113299945454, "acc_norm": 0.448642266824085, "acc_norm_stderr": 0.017099430514725792 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Herry443/Mistral-7B-KNUT-v0.1", "model_sha": "b90832d18d355d77c2e25181f59075070d946978", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }