{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.19539249146757678, "acc_stderr": 0.01158690718995291, "acc_norm": 0.24829351535836178, "acc_norm_stderr": 0.012624912868089755 }, "harness|ko_hellaswag|10": { "acc": 0.2832105158334993, "acc_stderr": 0.0044963697421321076, "acc_norm": 0.3134833698466441, "acc_norm_stderr": 0.0046296088632722925 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.29239766081871343, "acc_stderr": 0.034886477134579236, "acc_norm": 0.29239766081871343, "acc_norm_stderr": 0.034886477134579236 }, "harness|ko_mmlu_management|5": { "acc": 0.1650485436893204, "acc_stderr": 0.03675668832233188, "acc_norm": 0.1650485436893204, "acc_norm_stderr": 0.03675668832233188 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.2681992337164751, "acc_stderr": 0.015842430835269438, "acc_norm": 0.2681992337164751, "acc_norm_stderr": 0.015842430835269438 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.25925925925925924, "acc_stderr": 0.03785714465066655, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.03785714465066655 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.2170212765957447, "acc_stderr": 0.026947483121496252, "acc_norm": 0.2170212765957447, "acc_norm_stderr": 0.026947483121496252 }, "harness|ko_mmlu_virology|5": { "acc": 0.21686746987951808, "acc_stderr": 0.03208284450356365, "acc_norm": 0.21686746987951808, "acc_norm_stderr": 0.03208284450356365 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2572347266881029, "acc_stderr": 0.024826171289250888, "acc_norm": 0.2572347266881029, "acc_norm_stderr": 0.024826171289250888 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.2062780269058296, "acc_stderr": 0.027157150479563824, "acc_norm": 0.2062780269058296, "acc_norm_stderr": 0.027157150479563824 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2366412213740458, "acc_stderr": 0.03727673575596917, "acc_norm": 0.2366412213740458, "acc_norm_stderr": 0.03727673575596917 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.18, "acc_stderr": 0.03861229196653694, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653694 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.26262626262626265, "acc_stderr": 0.031353050095330834, "acc_norm": 0.26262626262626265, "acc_norm_stderr": 0.031353050095330834 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3310344827586207, "acc_stderr": 0.03921545312467122, "acc_norm": 0.3310344827586207, "acc_norm_stderr": 0.03921545312467122 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.2815126050420168, "acc_stderr": 0.029213549414372174, "acc_norm": 0.2815126050420168, "acc_norm_stderr": 0.029213549414372174 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2923076923076923, "acc_stderr": 0.02306043838085774, "acc_norm": 0.2923076923076923, "acc_norm_stderr": 0.02306043838085774 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.18518518518518517, "acc_stderr": 0.037552658650371835, "acc_norm": 
0.18518518518518517, "acc_norm_stderr": 0.037552658650371835 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2660098522167488, "acc_stderr": 0.031089826002937523, "acc_norm": 0.2660098522167488, "acc_norm_stderr": 0.031089826002937523 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.2806451612903226, "acc_stderr": 0.0255606047210229, "acc_norm": 0.2806451612903226, "acc_norm_stderr": 0.0255606047210229 }, "harness|ko_mmlu_marketing|5": { "acc": 0.25213675213675213, "acc_stderr": 0.02844796547623102, "acc_norm": 0.25213675213675213, "acc_norm_stderr": 0.02844796547623102 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2339622641509434, "acc_stderr": 0.02605529690115292, "acc_norm": 0.2339622641509434, "acc_norm_stderr": 0.02605529690115292 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.2, "acc_stderr": 0.038313051408846006, "acc_norm": 0.2, "acc_norm_stderr": 0.038313051408846006 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.02671924078371217, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.02671924078371217 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|ko_mmlu_sociology|5": { "acc": 0.2736318407960199, "acc_stderr": 0.031524391865554, "acc_norm": 0.2736318407960199, "acc_norm_stderr": 0.031524391865554 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3063583815028902, "acc_stderr": 0.035149425512674394, "acc_norm": 0.3063583815028902, "acc_norm_stderr": 0.035149425512674394 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2619047619047619, "acc_stderr": 0.02264421261552521, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.02264421261552521 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.24305555555555555, "acc_stderr": 0.03586879280080342, "acc_norm": 0.24305555555555555, "acc_norm_stderr": 0.03586879280080342 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.19, "acc_stderr": 0.03942772444036624, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036624 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2976878612716763, "acc_stderr": 0.024617055388677006, "acc_norm": 0.2976878612716763, "acc_norm_stderr": 0.024617055388677006 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2822085889570552, "acc_stderr": 0.03536117886664743, "acc_norm": 0.2822085889570552, "acc_norm_stderr": 0.03536117886664743 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.30246913580246915, "acc_stderr": 0.025557653981868034, "acc_norm": 0.30246913580246915, "acc_norm_stderr": 0.025557653981868034 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.3316062176165803, "acc_stderr": 0.03397636541089116, "acc_norm": 0.3316062176165803, "acc_norm_stderr": 0.03397636541089116 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.23302752293577983, "acc_stderr": 0.018125669180861507, "acc_norm": 0.23302752293577983, "acc_norm_stderr": 0.018125669180861507 }, "harness|ko_mmlu_formal_logic|5": { 
"acc": 0.15873015873015872, "acc_stderr": 0.032684540130117436, "acc_norm": 0.15873015873015872, "acc_norm_stderr": 0.032684540130117436 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.25163398692810457, "acc_stderr": 0.0248480182638752, "acc_norm": 0.25163398692810457, "acc_norm_stderr": 0.0248480182638752 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|ko_mmlu_international_law|5": { "acc": 0.371900826446281, "acc_stderr": 0.04412015806624503, "acc_norm": 0.371900826446281, "acc_norm_stderr": 0.04412015806624503 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.27631578947368424, "acc_stderr": 0.03639057569952924, "acc_norm": 0.27631578947368424, "acc_norm_stderr": 0.03639057569952924 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.27124183006535946, "acc_stderr": 0.017986615304030305, "acc_norm": 0.27124183006535946, "acc_norm_stderr": 0.017986615304030305 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.26595744680851063, "acc_stderr": 0.026358065698880592, "acc_norm": 0.26595744680851063, "acc_norm_stderr": 0.026358065698880592 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.23214285714285715, "acc_stderr": 0.04007341809755807, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.04007341809755807 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.25, "acc_stderr": 0.029531221160930918, "acc_norm": 0.25, "acc_norm_stderr": 0.029531221160930918 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808852, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808852 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.2647058823529412, "acc_stderr": 0.026799562024887653, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.026799562024887653 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.39591836734693875, "acc_stderr": 0.03130802899065685, "acc_norm": 0.39591836734693875, "acc_norm_stderr": 0.03130802899065685 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.25316455696202533, "acc_stderr": 0.028304657943035303, "acc_norm": 0.25316455696202533, "acc_norm_stderr": 0.028304657943035303 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2588005215123859, "acc_stderr": 0.01118610904656461, "acc_norm": 0.2588005215123859, "acc_norm_stderr": 0.01118610904656461 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.03401506715249039, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.03401506715249039 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.23378212974296206, "mc1_stderr": 0.01481619599193158, "mc2": 0.46005718929477757, "mc2_stderr": 0.016990439061351184 }, "harness|ko_commongen_v2|2": { "acc": 0.24793388429752067, "acc_stderr": 0.01484604496825225, "acc_norm": 0.29161747343565525, "acc_norm_stderr": 0.015626276690070242 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, 
"harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "eclipsemint/kollama2-7b-v1.3", "model_sha": "ba1caccde94a38f8e099177229e71b93a9aac534", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }