{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.30119453924914674, "acc_stderr": 0.013406741767847636, "acc_norm": 0.3464163822525597, "acc_norm_stderr": 0.013905011180063244 }, "harness|ko_hellaswag|10": { "acc": 0.35132443736307506, "acc_stderr": 0.0047640845971769034, "acc_norm": 0.4510057757418841, "acc_norm_stderr": 0.004965768348628059 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4502923976608187, "acc_stderr": 0.03815827365913236, "acc_norm": 0.4502923976608187, "acc_norm_stderr": 0.03815827365913236 }, "harness|ko_mmlu_management|5": { "acc": 0.6019417475728155, "acc_stderr": 0.04846748253977238, "acc_norm": 0.6019417475728155, "acc_norm_stderr": 0.04846748253977238 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4495530012771392, "acc_stderr": 0.017788725283507337, "acc_norm": 0.4495530012771392, "acc_norm_stderr": 0.017788725283507337 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.042039210401562783 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3446808510638298, "acc_stderr": 0.031068985963122155, "acc_norm": 0.3446808510638298, "acc_norm_stderr": 0.031068985963122155 }, "harness|ko_mmlu_virology|5": { "acc": 0.42771084337349397, "acc_stderr": 0.03851597683718533, "acc_norm": 0.42771084337349397, "acc_norm_stderr": 0.03851597683718533 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4790996784565916, "acc_stderr": 0.028373270961069414, "acc_norm": 0.4790996784565916, "acc_norm_stderr": 0.028373270961069414 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.452914798206278, "acc_stderr": 0.03340867501923325, "acc_norm": 0.452914798206278, "acc_norm_stderr": 0.03340867501923325 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.40458015267175573, "acc_stderr": 0.043046937953806645, "acc_norm": 0.40458015267175573, "acc_norm_stderr": 0.043046937953806645 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.42424242424242425, "acc_stderr": 0.03521224908841583, "acc_norm": 0.42424242424242425, "acc_norm_stderr": 0.03521224908841583 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4827586206896552, "acc_stderr": 0.04164188720169377, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.04164188720169377 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.2647058823529412, "acc_stderr": 0.04389869956808777, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.04389869956808777 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.44537815126050423, "acc_stderr": 0.0322841062671639, "acc_norm": 0.44537815126050423, "acc_norm_stderr": 0.0322841062671639 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.40512820512820513, "acc_stderr": 0.024890471769938152, "acc_norm": 0.40512820512820513, "acc_norm_stderr": 0.024890471769938152 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.55, "acc_stderr": 0.04999999999999999, "acc_norm": 0.55, "acc_norm_stderr": 0.04999999999999999 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.3888888888888889, "acc_stderr": 0.0471282125742677, "acc_norm": 0.3888888888888889, 
"acc_norm_stderr": 0.0471282125742677 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.39408866995073893, "acc_stderr": 0.034381579670365446, "acc_norm": 0.39408866995073893, "acc_norm_stderr": 0.034381579670365446 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.36451612903225805, "acc_stderr": 0.02737987122994325, "acc_norm": 0.36451612903225805, "acc_norm_stderr": 0.02737987122994325 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6452991452991453, "acc_stderr": 0.03134250486245402, "acc_norm": 0.6452991452991453, "acc_norm_stderr": 0.03134250486245402 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.35471698113207545, "acc_stderr": 0.029445175328199596, "acc_norm": 0.35471698113207545, "acc_norm_stderr": 0.029445175328199596 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.42727272727272725, "acc_stderr": 0.047381987035454834, "acc_norm": 0.42727272727272725, "acc_norm_stderr": 0.047381987035454834 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3074074074074074, "acc_stderr": 0.028133252578815632, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.028133252578815632 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5771144278606966, "acc_stderr": 0.034932317774212816, "acc_norm": 0.5771144278606966, "acc_norm_stderr": 0.034932317774212816 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3699421965317919, "acc_stderr": 0.036812296333943194, "acc_norm": 0.3699421965317919, "acc_norm_stderr": 0.036812296333943194 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.37566137566137564, "acc_stderr": 0.02494236893115978, "acc_norm": 0.37566137566137564, "acc_norm_stderr": 0.02494236893115978 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3263888888888889, "acc_stderr": 0.03921067198982266, "acc_norm": 0.3263888888888889, "acc_norm_stderr": 0.03921067198982266 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.44508670520231214, "acc_stderr": 0.026756255129663765, "acc_norm": 0.44508670520231214, "acc_norm_stderr": 0.026756255129663765 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3803680981595092, "acc_stderr": 0.03814269893261837, "acc_norm": 0.3803680981595092, "acc_norm_stderr": 0.03814269893261837 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.42592592592592593, "acc_stderr": 0.027513747284379417, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.027513747284379417 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.40932642487046633, "acc_stderr": 0.03548608168860806, "acc_norm": 0.40932642487046633, "acc_norm_stderr": 0.03548608168860806 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.21052631578947367, "acc_stderr": 0.0383515395439942, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.0383515395439942 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.44587155963302755, "acc_stderr": 0.02131133500970858, "acc_norm": 0.44587155963302755, "acc_norm_stderr": 0.02131133500970858 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4150326797385621, "acc_stderr": 0.0282135041778241, "acc_norm": 0.4150326797385621, "acc_norm_stderr": 0.0282135041778241 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6115702479338843, "acc_stderr": 0.04449270350068383, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.04449270350068383 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4144736842105263, "acc_stderr": 0.04008973785779205, "acc_norm": 0.4144736842105263, "acc_norm_stderr": 0.04008973785779205 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3888888888888889, "acc_stderr": 0.019722058939618068, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.019722058939618068 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3475177304964539, "acc_stderr": 0.02840662780959095, "acc_norm": 0.3475177304964539, "acc_norm_stderr": 0.02840662780959095 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.29464285714285715, "acc_stderr": 0.0432704093257873, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.0432704093257873 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2916666666666667, "acc_stderr": 0.03099866630456053, "acc_norm": 0.2916666666666667, "acc_norm_stderr": 0.03099866630456053 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.23910614525139665, "acc_stderr": 0.014265554192331149, "acc_norm": 0.23910614525139665, "acc_norm_stderr": 0.014265554192331149 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3161764705882353, "acc_stderr": 0.028245687391462913, "acc_norm": 0.3161764705882353, "acc_norm_stderr": 0.028245687391462913 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.363265306122449, "acc_stderr": 0.030789051139030806, "acc_norm": 0.363265306122449, "acc_norm_stderr": 0.030789051139030806 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5443037974683544, "acc_stderr": 0.032419206846933335, "acc_norm": 0.5443037974683544, "acc_norm_stderr": 0.032419206846933335 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3226857887874837, "acc_stderr": 0.011940264193195974, "acc_norm": 0.3226857887874837, "acc_norm_stderr": 0.011940264193195974 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3382352941176471, "acc_stderr": 0.03320574612945432, "acc_norm": 0.3382352941176471, "acc_norm_stderr": 0.03320574612945432 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3151515151515151, "acc_stderr": 0.0362773057502241, "acc_norm": 0.3151515151515151, "acc_norm_stderr": 0.0362773057502241 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2839657282741738, "mc1_stderr": 0.015785370858396715, "mc2": 0.4518577671193954, "mc2_stderr": 0.015379505911432577 }, "harness|ko_commongen_v2|2": { "acc": 0.42266824085005905, "acc_stderr": 0.016983506079577604, "acc_norm": 0.5171192443919717, "acc_norm_stderr": 0.01718027524608563 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "LI-ST/Mistral-7B-ko-v0.9", "model_sha": "c2ede85533e0895505871be87fc34c1906433304", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }