{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2226962457337884, "acc_stderr": 0.012158314774829931, "acc_norm": 0.2687713310580205, "acc_norm_stderr": 0.012955065963710695 }, "harness|ko_hellaswag|10": { "acc": 0.2657837084246166, "acc_stderr": 0.004408468107262734, "acc_norm": 0.2920732921728739, "acc_norm_stderr": 0.004537865171414028 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.38011695906432746, "acc_stderr": 0.03722965741385539, "acc_norm": 0.38011695906432746, "acc_norm_stderr": 0.03722965741385539 }, "harness|ko_mmlu_management|5": { "acc": 0.20388349514563106, "acc_stderr": 0.0398913985953177, "acc_norm": 0.20388349514563106, "acc_norm_stderr": 0.0398913985953177 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.280970625798212, "acc_stderr": 0.016073127851221232, "acc_norm": 0.280970625798212, "acc_norm_stderr": 0.016073127851221232 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.0335567721631314, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.0335567721631314 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.2680851063829787, "acc_stderr": 0.02895734278834235, "acc_norm": 0.2680851063829787, "acc_norm_stderr": 0.02895734278834235 }, "harness|ko_mmlu_virology|5": { "acc": 0.27710843373493976, "acc_stderr": 0.03484331592680588, "acc_norm": 0.27710843373493976, "acc_norm_stderr": 0.03484331592680588 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2315112540192926, "acc_stderr": 0.023956532766639133, "acc_norm": 0.2315112540192926, "acc_norm_stderr": 0.023956532766639133 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.3811659192825112, "acc_stderr": 0.03259625118416827, "acc_norm": 0.3811659192825112, "acc_norm_stderr": 0.03259625118416827 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.3282442748091603, "acc_stderr": 0.04118438565806298, "acc_norm": 0.3282442748091603, "acc_norm_stderr": 0.04118438565806298 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.21717171717171718, "acc_stderr": 0.02937661648494563, "acc_norm": 0.21717171717171718, "acc_norm_stderr": 0.02937661648494563 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.25517241379310346, "acc_stderr": 0.03632984052707842, "acc_norm": 0.25517241379310346, "acc_norm_stderr": 0.03632984052707842 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179964, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179964 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.2689075630252101, "acc_stderr": 0.028801392193631276, "acc_norm": 0.2689075630252101, "acc_norm_stderr": 0.028801392193631276 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.22564102564102564, "acc_stderr": 0.021193632525148533, "acc_norm": 0.22564102564102564, "acc_norm_stderr": 0.021193632525148533 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.21, "acc_stderr": 0.04093601807403326, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403326 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.26851851851851855, "acc_stderr": 0.04284467968052191, "acc_norm": 0.26851851851851855, 
"acc_norm_stderr": 0.04284467968052191 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.18719211822660098, "acc_stderr": 0.027444924966882618, "acc_norm": 0.18719211822660098, "acc_norm_stderr": 0.027444924966882618 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.2709677419354839, "acc_stderr": 0.02528441611490016, "acc_norm": 0.2709677419354839, "acc_norm_stderr": 0.02528441611490016 }, "harness|ko_mmlu_marketing|5": { "acc": 0.37606837606837606, "acc_stderr": 0.03173393632969482, "acc_norm": 0.37606837606837606, "acc_norm_stderr": 0.03173393632969482 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2339622641509434, "acc_stderr": 0.02605529690115292, "acc_norm": 0.2339622641509434, "acc_norm_stderr": 0.02605529690115292 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.24545454545454545, "acc_stderr": 0.041220665028782855, "acc_norm": 0.24545454545454545, "acc_norm_stderr": 0.041220665028782855 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.1962962962962963, "acc_stderr": 0.024217421327417162, "acc_norm": 0.1962962962962963, "acc_norm_stderr": 0.024217421327417162 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.23178807947019867, "acc_stderr": 0.03445406271987054, "acc_norm": 0.23178807947019867, "acc_norm_stderr": 0.03445406271987054 }, "harness|ko_mmlu_sociology|5": { "acc": 0.3383084577114428, "acc_stderr": 0.03345563070339191, "acc_norm": 0.3383084577114428, "acc_norm_stderr": 0.03345563070339191 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.1907514450867052, "acc_stderr": 0.029957851329869327, "acc_norm": 0.1907514450867052, "acc_norm_stderr": 0.029957851329869327 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2037037037037037, "acc_stderr": 0.020742740560122666, "acc_norm": 0.2037037037037037, "acc_norm_stderr": 0.020742740560122666 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.19, "acc_stderr": 0.03942772444036623, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036623 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2976878612716763, "acc_stderr": 0.024617055388676985, "acc_norm": 0.2976878612716763, "acc_norm_stderr": 0.024617055388676985 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2392638036809816, "acc_stderr": 0.033519538795212696, "acc_norm": 0.2392638036809816, "acc_norm_stderr": 0.033519538795212696 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.27469135802469136, "acc_stderr": 0.024836057868294677, "acc_norm": 0.27469135802469136, "acc_norm_stderr": 0.024836057868294677 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.24352331606217617, "acc_stderr": 0.030975436386845415, "acc_norm": 0.24352331606217617, "acc_norm_stderr": 0.030975436386845415 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.21052631578947367, "acc_stderr": 0.038351539543994194, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.038351539543994194 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.22385321100917432, "acc_stderr": 0.017871217767790215, "acc_norm": 0.22385321100917432, "acc_norm_stderr": 0.017871217767790215 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.0404061017820884, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.0404061017820884 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.24836601307189543, "acc_stderr": 0.02473998135511359, "acc_norm": 0.24836601307189543, "acc_norm_stderr": 0.02473998135511359 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2809917355371901, "acc_stderr": 0.04103203830514512, "acc_norm": 0.2809917355371901, "acc_norm_stderr": 0.04103203830514512 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.21710526315789475, "acc_stderr": 0.03355045304882924, "acc_norm": 0.21710526315789475, "acc_norm_stderr": 0.03355045304882924 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.26143790849673204, "acc_stderr": 0.017776947157528044, "acc_norm": 0.26143790849673204, "acc_norm_stderr": 0.017776947157528044 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.24822695035460993, "acc_stderr": 0.025770015644290392, "acc_norm": 0.24822695035460993, "acc_norm_stderr": 0.025770015644290392 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2037037037037037, "acc_stderr": 0.02746740180405799, "acc_norm": 0.2037037037037037, "acc_norm_stderr": 0.02746740180405799 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574892, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574892 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.20955882352941177, "acc_stderr": 0.02472311040767704, "acc_norm": 0.20955882352941177, "acc_norm_stderr": 0.02472311040767704 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.2571428571428571, "acc_stderr": 0.02797982353874455, "acc_norm": 0.2571428571428571, "acc_norm_stderr": 0.02797982353874455 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.31645569620253167, "acc_stderr": 0.03027497488021897, "acc_norm": 0.31645569620253167, "acc_norm_stderr": 0.03027497488021897 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.24902216427640156, "acc_stderr": 0.01104489226404077, "acc_norm": 0.24902216427640156, "acc_norm_stderr": 0.01104489226404077 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.20098039215686275, "acc_stderr": 0.028125972265654362, "acc_norm": 0.20098039215686275, "acc_norm_stderr": 0.028125972265654362 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.23030303030303031, "acc_stderr": 0.03287666758603488, "acc_norm": 0.23030303030303031, "acc_norm_stderr": 0.03287666758603488 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.24357405140758873, "mc1_stderr": 0.015026354824910782, "mc2": 0.49334428566474076, "mc2_stderr": 0.016873715132849066 }, "harness|ko_commongen_v2|2": { "acc": 0.08146399055489964, "acc_stderr": 0.009404717441946268, "acc_norm": 0.32113341204250295, "acc_norm_stderr": 0.016052762579111562 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "jb723/LLaMA2_crosslingual_transfer_1", "model_sha": "ece29b636ef0b0c4b6d945ed66e97510b3ad6b0a", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }