{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.31399317406143346, "acc_stderr": 0.013562691224726291, "acc_norm": 0.35409556313993173, "acc_norm_stderr": 0.013975454122756553 }, "harness|ko_hellaswag|10": { "acc": 0.36068512248556067, "acc_stderr": 0.004792179052583444, "acc_norm": 0.45140410276837284, "acc_norm_stderr": 0.004966158142645413 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.49707602339181284, "acc_stderr": 0.03834759370936839, "acc_norm": 0.49707602339181284, "acc_norm_stderr": 0.03834759370936839 }, "harness|ko_mmlu_management|5": { "acc": 0.5339805825242718, "acc_stderr": 0.04939291447273481, "acc_norm": 0.5339805825242718, "acc_norm_stderr": 0.04939291447273481 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4342273307790549, "acc_stderr": 0.017724589389677785, "acc_norm": 0.4342273307790549, "acc_norm_stderr": 0.017724589389677785 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.34074074074074073, "acc_stderr": 0.040943762699967946, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.040943762699967946 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3446808510638298, "acc_stderr": 0.03106898596312215, "acc_norm": 0.3446808510638298, "acc_norm_stderr": 0.03106898596312215 }, "harness|ko_mmlu_virology|5": { "acc": 0.42168674698795183, "acc_stderr": 0.03844453181770917, "acc_norm": 0.42168674698795183, "acc_norm_stderr": 0.03844453181770917 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.44694533762057875, "acc_stderr": 0.02823776942208534, "acc_norm": 0.44694533762057875, "acc_norm_stderr": 0.02823776942208534 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.35874439461883406, "acc_stderr": 0.03219079200419994, "acc_norm": 0.35874439461883406, "acc_norm_stderr": 0.03219079200419994 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.3816793893129771, "acc_stderr": 0.0426073515764456, "acc_norm": 0.3816793893129771, "acc_norm_stderr": 0.0426073515764456 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.4696969696969697, "acc_stderr": 0.03555804051763929, "acc_norm": 0.4696969696969697, "acc_norm_stderr": 0.03555804051763929 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4689655172413793, "acc_stderr": 0.04158632762097828, "acc_norm": 0.4689655172413793, "acc_norm_stderr": 0.04158632762097828 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.032145368597886394, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.032145368597886394 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4, "acc_stderr": 0.024838811988033158, "acc_norm": 0.4, "acc_norm_stderr": 0.024838811988033158 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4166666666666667, "acc_stderr": 0.04766075165356461, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 
0.04766075165356461 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4039408866995074, "acc_stderr": 0.034524539038220385, "acc_norm": 0.4039408866995074, "acc_norm_stderr": 0.034524539038220385 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4064516129032258, "acc_stderr": 0.027941727346256308, "acc_norm": 0.4064516129032258, "acc_norm_stderr": 0.027941727346256308 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6452991452991453, "acc_stderr": 0.03134250486245402, "acc_norm": 0.6452991452991453, "acc_norm_stderr": 0.03134250486245402 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.35094339622641507, "acc_stderr": 0.029373646253234686, "acc_norm": 0.35094339622641507, "acc_norm_stderr": 0.029373646253234686 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.04782001791380061 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.028897748741131147, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.028897748741131147 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.03631329803969653, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.03631329803969653 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6069651741293532, "acc_stderr": 0.0345368246603156, "acc_norm": 0.6069651741293532, "acc_norm_stderr": 0.0345368246603156 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3179190751445087, "acc_stderr": 0.0355068398916558, "acc_norm": 0.3179190751445087, "acc_norm_stderr": 0.0355068398916558 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.36772486772486773, "acc_stderr": 0.02483383982556242, "acc_norm": 0.36772486772486773, "acc_norm_stderr": 0.02483383982556242 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2777777777777778, "acc_stderr": 0.037455547914624555, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.037455547914624555 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.57, "acc_stderr": 0.04975698519562426, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562426 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.476878612716763, "acc_stderr": 0.026890297881303125, "acc_norm": 0.476878612716763, "acc_norm_stderr": 0.026890297881303125 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3987730061349693, "acc_stderr": 0.038470214204560246, "acc_norm": 0.3987730061349693, "acc_norm_stderr": 0.038470214204560246 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.44135802469135804, "acc_stderr": 0.027628737155668784, "acc_norm": 0.44135802469135804, "acc_norm_stderr": 0.027628737155668784 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.48186528497409326, "acc_stderr": 0.036060650018329185, "acc_norm": 0.48186528497409326, "acc_norm_stderr": 0.036060650018329185 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.42201834862385323, "acc_stderr": 0.02117499140776317, "acc_norm": 0.42201834862385323, "acc_norm_stderr": 0.02117499140776317 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.3333333333333333, "acc_stderr": 0.04216370213557835, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04216370213557835 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4117647058823529, "acc_stderr": 0.02818059632825929, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.02818059632825929 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6363636363636364, "acc_stderr": 0.043913262867240704, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.043913262867240704 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.39473684210526316, "acc_stderr": 0.039777499346220734, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.039777499346220734 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.36437908496732024, "acc_stderr": 0.019469518221573702, "acc_norm": 0.36437908496732024, "acc_norm_stderr": 0.019469518221573702 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.36879432624113473, "acc_stderr": 0.028782227561347247, "acc_norm": 0.36879432624113473, "acc_norm_stderr": 0.028782227561347247 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04547960999764376, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04547960999764376 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.375, "acc_stderr": 0.033016908987210894, "acc_norm": 0.375, "acc_norm_stderr": 0.033016908987210894 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.23910614525139665, "acc_stderr": 0.014265554192331149, "acc_norm": 0.23910614525139665, "acc_norm_stderr": 0.014265554192331149 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.35661764705882354, "acc_stderr": 0.02909720956841195, "acc_norm": 0.35661764705882354, "acc_norm_stderr": 0.02909720956841195 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.39591836734693875, "acc_stderr": 0.03130802899065685, "acc_norm": 0.39591836734693875, "acc_norm_stderr": 0.03130802899065685 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5358649789029536, "acc_stderr": 0.03246338898055659, "acc_norm": 0.5358649789029536, "acc_norm_stderr": 0.03246338898055659 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.32920469361147325, "acc_stderr": 0.012002091666902305, "acc_norm": 0.32920469361147325, "acc_norm_stderr": 0.012002091666902305 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.39705882352941174, "acc_stderr": 0.034341311647191286, "acc_norm": 0.39705882352941174, "acc_norm_stderr": 0.034341311647191286 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.37575757575757573, "acc_stderr": 0.037818873532059816, "acc_norm": 0.37575757575757573, "acc_norm_stderr": 0.037818873532059816 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2558139534883721, "mc1_stderr": 0.015274176219283331, "mc2": 0.4259316971970392, "mc2_stderr": 0.015462913136325425 }, "harness|ko_commongen_v2|2": { "acc": 0.38488783943329397, "acc_stderr": 0.016728579701498665, "acc_norm": 0.46635182998819363, "acc_norm_stderr": 0.017151384117131876 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, 
"harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "LI-ST/Mistral-7B-ko-v0.10", "model_sha": "b2feae16837ddfa9402366e848700bd25c88b330", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }