{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3447098976109215, "acc_stderr": 0.013888816286782112, "acc_norm": 0.4069965870307167, "acc_norm_stderr": 0.014356399418009135 }, "harness|ko_hellaswag|10": { "acc": 0.3681537542322247, "acc_stderr": 0.004813177057496271, "acc_norm": 0.47082254530969925, "acc_norm_stderr": 0.00498127832642802 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.47953216374269003, "acc_stderr": 0.038316105328219316, "acc_norm": 0.47953216374269003, "acc_norm_stderr": 0.038316105328219316 }, "harness|ko_mmlu_management|5": { "acc": 0.5242718446601942, "acc_stderr": 0.049449010929737795, "acc_norm": 0.5242718446601942, "acc_norm_stderr": 0.049449010929737795 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.45849297573435505, "acc_stderr": 0.017818248603465554, "acc_norm": 0.45849297573435505, "acc_norm_stderr": 0.017818248603465554 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.31851851851851853, "acc_stderr": 0.0402477840197711, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.0402477840197711 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.37872340425531914, "acc_stderr": 0.03170995606040655, "acc_norm": 0.37872340425531914, "acc_norm_stderr": 0.03170995606040655 }, "harness|ko_mmlu_virology|5": { "acc": 0.39759036144578314, "acc_stderr": 0.038099730845402184, "acc_norm": 0.39759036144578314, "acc_norm_stderr": 0.038099730845402184 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4340836012861736, "acc_stderr": 0.0281502322445356, "acc_norm": 0.4340836012861736, "acc_norm_stderr": 0.0281502322445356 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.40358744394618834, "acc_stderr": 0.03292802819330315, "acc_norm": 0.40358744394618834, "acc_norm_stderr": 0.03292802819330315 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.48854961832061067, "acc_stderr": 0.043841400240780176, "acc_norm": 0.48854961832061067, "acc_norm_stderr": 0.043841400240780176 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.45, "acc_stderr": 0.04999999999999999, "acc_norm": 0.45, "acc_norm_stderr": 0.04999999999999999 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5909090909090909, "acc_stderr": 0.03502975799413008, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.03502975799413008 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4068965517241379, "acc_stderr": 0.040937939812662374, "acc_norm": 0.4068965517241379, "acc_norm_stderr": 0.040937939812662374 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4789915966386555, "acc_stderr": 0.03244980849990029, "acc_norm": 0.4789915966386555, "acc_norm_stderr": 0.03244980849990029 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.43846153846153846, "acc_stderr": 0.02515826601686856, "acc_norm": 0.43846153846153846, "acc_norm_stderr": 0.02515826601686856 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4444444444444444, "acc_stderr": 0.04803752235190192, "acc_norm": 
0.4444444444444444, "acc_norm_stderr": 0.04803752235190192 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3645320197044335, "acc_stderr": 0.0338640574606209, "acc_norm": 0.3645320197044335, "acc_norm_stderr": 0.0338640574606209 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.432258064516129, "acc_stderr": 0.028181739720019413, "acc_norm": 0.432258064516129, "acc_norm_stderr": 0.028181739720019413 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6965811965811965, "acc_stderr": 0.03011821010694263, "acc_norm": 0.6965811965811965, "acc_norm_stderr": 0.03011821010694263 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.41132075471698115, "acc_stderr": 0.030285009259009812, "acc_norm": 0.41132075471698115, "acc_norm_stderr": 0.030285009259009812 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.43636363636363634, "acc_stderr": 0.04750185058907297, "acc_norm": 0.43636363636363634, "acc_norm_stderr": 0.04750185058907297 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.027940457136228402, "acc_norm": 0.3, "acc_norm_stderr": 0.027940457136228402 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2980132450331126, "acc_stderr": 0.037345356767871984, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5671641791044776, "acc_stderr": 0.03503490923673282, "acc_norm": 0.5671641791044776, "acc_norm_stderr": 0.03503490923673282 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3988439306358382, "acc_stderr": 0.03733626655383509, "acc_norm": 0.3988439306358382, "acc_norm_stderr": 0.03733626655383509 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.31746031746031744, "acc_stderr": 0.02397386199899207, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.02397386199899207 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3263888888888889, "acc_stderr": 0.03921067198982266, "acc_norm": 0.3263888888888889, "acc_norm_stderr": 0.03921067198982266 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.45375722543352603, "acc_stderr": 0.02680372058320619, "acc_norm": 0.45375722543352603, "acc_norm_stderr": 0.02680372058320619 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3803680981595092, "acc_stderr": 0.03814269893261837, "acc_norm": 0.3803680981595092, "acc_norm_stderr": 0.03814269893261837 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4166666666666667, "acc_stderr": 0.02743162372241502, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.02743162372241502 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.43005181347150256, "acc_stderr": 0.03572954333144809, "acc_norm": 0.43005181347150256, "acc_norm_stderr": 0.03572954333144809 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.46605504587155966, "acc_stderr": 0.021387863350353996, "acc_norm": 0.46605504587155966, "acc_norm_stderr": 0.021387863350353996 }, "harness|ko_mmlu_formal_logic|5": { 
"acc": 0.4126984126984127, "acc_stderr": 0.04403438954768177, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.04403438954768177 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.45751633986928103, "acc_stderr": 0.028526383452142638, "acc_norm": 0.45751633986928103, "acc_norm_stderr": 0.028526383452142638 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6115702479338843, "acc_stderr": 0.044492703500683815, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.044492703500683815 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.39473684210526316, "acc_stderr": 0.03977749934622074, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.03977749934622074 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.36437908496732024, "acc_stderr": 0.019469518221573702, "acc_norm": 0.36437908496732024, "acc_norm_stderr": 0.019469518221573702 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.28368794326241137, "acc_stderr": 0.02689170942834396, "acc_norm": 0.28368794326241137, "acc_norm_stderr": 0.02689170942834396 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.04616143075028546, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028546 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.44907407407407407, "acc_stderr": 0.03392238405321617, "acc_norm": 0.44907407407407407, "acc_norm_stderr": 0.03392238405321617 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808852, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808852 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4338235294117647, "acc_stderr": 0.030105636570016647, "acc_norm": 0.4338235294117647, "acc_norm_stderr": 0.030105636570016647 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4897959183673469, "acc_stderr": 0.032002553478937816, "acc_norm": 0.4897959183673469, "acc_norm_stderr": 0.032002553478937816 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5316455696202531, "acc_stderr": 0.032481974005110756, "acc_norm": 0.5316455696202531, "acc_norm_stderr": 0.032481974005110756 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.30638852672750977, "acc_stderr": 0.011773980329380694, "acc_norm": 0.30638852672750977, "acc_norm_stderr": 0.011773980329380694 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4068627450980392, "acc_stderr": 0.03447891136353382, "acc_norm": 0.4068627450980392, "acc_norm_stderr": 0.03447891136353382 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.38181818181818183, "acc_stderr": 0.03793713171165634, "acc_norm": 0.38181818181818183, "acc_norm_stderr": 0.03793713171165634 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2827417380660955, "mc1_stderr": 0.015764770836777308, "mc2": 0.4510506680374379, "mc2_stderr": 0.015547368837733567 }, "harness|ko_commongen_v2|2": { "acc": 0.42502951593860683, "acc_stderr": 0.016996016308362887, "acc_norm": 0.5360094451003542, "acc_norm_stderr": 0.017145715365486664 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "LI-ST/Mistral-7B-ko-v0.003", "model_sha": "adc7c6aed876f04edaed3bbeba7fa4fdb993091c", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }