{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.38310580204778155, "acc_stderr": 0.014206472661672877, "acc_norm": 0.4232081911262799, "acc_norm_stderr": 0.014438036220848017 }, "harness|ko_hellaswag|10": { "acc": 0.39454291973710415, "acc_stderr": 0.004877534215987089, "acc_norm": 0.5108544114718183, "acc_norm_stderr": 0.0049886054982739 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5087719298245614, "acc_stderr": 0.03834234744164993, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.03834234744164993 }, "harness|ko_mmlu_management|5": { "acc": 0.6019417475728155, "acc_stderr": 0.04846748253977238, "acc_norm": 0.6019417475728155, "acc_norm_stderr": 0.04846748253977238 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.49936143039591313, "acc_stderr": 0.01787994891443167, "acc_norm": 0.49936143039591313, "acc_norm_stderr": 0.01787994891443167 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.042039210401562783 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4340425531914894, "acc_stderr": 0.032400380867927465, "acc_norm": 0.4340425531914894, "acc_norm_stderr": 0.032400380867927465 }, "harness|ko_mmlu_virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.03836722176598052, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.03836722176598052 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.47266881028938906, "acc_stderr": 0.02835563356832818, "acc_norm": 0.47266881028938906, "acc_norm_stderr": 0.02835563356832818 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.48878923766816146, "acc_stderr": 0.033549366530984746, "acc_norm": 0.48878923766816146, "acc_norm_stderr": 0.033549366530984746 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.46564885496183206, "acc_stderr": 0.043749285605997376, "acc_norm": 0.46564885496183206, "acc_norm_stderr": 0.043749285605997376 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.6060606060606061, "acc_stderr": 0.03481285338232963, "acc_norm": 0.6060606060606061, "acc_norm_stderr": 0.03481285338232963 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.503448275862069, "acc_stderr": 0.04166567577101579, "acc_norm": 0.503448275862069, "acc_norm_stderr": 0.04166567577101579 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.044405219061793275, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.044405219061793275 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.47478991596638653, "acc_stderr": 0.0324371805513741, "acc_norm": 0.47478991596638653, "acc_norm_stderr": 0.0324371805513741 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.46923076923076923, "acc_stderr": 0.02530295889085015, "acc_norm": 0.46923076923076923, "acc_norm_stderr": 0.02530295889085015 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.51, "acc_stderr": 0.05024183937956914, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956914 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.48148148148148145, "acc_stderr": 0.04830366024635331, "acc_norm": 
0.48148148148148145, "acc_norm_stderr": 0.04830366024635331 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4187192118226601, "acc_stderr": 0.03471192860518468, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.03471192860518468 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4774193548387097, "acc_stderr": 0.028414985019707868, "acc_norm": 0.4774193548387097, "acc_norm_stderr": 0.028414985019707868 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7393162393162394, "acc_stderr": 0.02876034895652341, "acc_norm": 0.7393162393162394, "acc_norm_stderr": 0.02876034895652341 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.47924528301886793, "acc_stderr": 0.030746349975723463, "acc_norm": 0.47924528301886793, "acc_norm_stderr": 0.030746349975723463 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.04782001791380061 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2814814814814815, "acc_stderr": 0.02742001935094527, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.02742001935094527 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6169154228855721, "acc_stderr": 0.034375193373382504, "acc_norm": 0.6169154228855721, "acc_norm_stderr": 0.034375193373382504 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.42196531791907516, "acc_stderr": 0.0376574669386515, "acc_norm": 0.42196531791907516, "acc_norm_stderr": 0.0376574669386515 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3835978835978836, "acc_stderr": 0.0250437573185202, "acc_norm": 0.3835978835978836, "acc_norm_stderr": 0.0250437573185202 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3819444444444444, "acc_stderr": 0.040629907841466674, "acc_norm": 0.3819444444444444, "acc_norm_stderr": 0.040629907841466674 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.62, "acc_stderr": 0.04878317312145633, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145633 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5173410404624278, "acc_stderr": 0.026902900458666647, "acc_norm": 0.5173410404624278, "acc_norm_stderr": 0.026902900458666647 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.44171779141104295, "acc_stderr": 0.03901591825836184, "acc_norm": 0.44171779141104295, "acc_norm_stderr": 0.03901591825836184 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.44135802469135804, "acc_stderr": 0.027628737155668773, "acc_norm": 0.44135802469135804, "acc_norm_stderr": 0.027628737155668773 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.49740932642487046, "acc_stderr": 0.03608390745384487, "acc_norm": 0.49740932642487046, "acc_norm_stderr": 0.03608390745384487 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022057, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022057 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.48440366972477067, "acc_stderr": 0.02142689153920805, "acc_norm": 0.48440366972477067, "acc_norm_stderr": 0.02142689153920805 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04216370213557835, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04216370213557835 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5228758169934641, "acc_stderr": 0.028599936776089782, "acc_norm": 0.5228758169934641, "acc_norm_stderr": 0.028599936776089782 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6776859504132231, "acc_stderr": 0.042664163633521664, "acc_norm": 0.6776859504132231, "acc_norm_stderr": 0.042664163633521664 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.506578947368421, "acc_stderr": 0.040685900502249704, "acc_norm": 0.506578947368421, "acc_norm_stderr": 0.040685900502249704 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.4084967320261438, "acc_stderr": 0.019886221037501862, "acc_norm": 0.4084967320261438, "acc_norm_stderr": 0.019886221037501862 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.37943262411347517, "acc_stderr": 0.028947338851614105, "acc_norm": 0.37943262411347517, "acc_norm_stderr": 0.028947338851614105 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.39285714285714285, "acc_stderr": 0.046355501356099754, "acc_norm": 0.39285714285714285, "acc_norm_stderr": 0.046355501356099754 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.033509916046960436, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.033509916046960436 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2860335195530726, "acc_stderr": 0.015113972129062136, "acc_norm": 0.2860335195530726, "acc_norm_stderr": 0.015113972129062136 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4375, "acc_stderr": 0.030134614954403924, "acc_norm": 0.4375, "acc_norm_stderr": 0.030134614954403924 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4448979591836735, "acc_stderr": 0.03181425118197787, "acc_norm": 0.4448979591836735, "acc_norm_stderr": 0.03181425118197787 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6160337552742616, "acc_stderr": 0.031658678064106674, "acc_norm": 0.6160337552742616, "acc_norm_stderr": 0.031658678064106674 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3468057366362451, "acc_stderr": 0.012156071332318705, "acc_norm": 0.3468057366362451, "acc_norm_stderr": 0.012156071332318705 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.45588235294117646, "acc_stderr": 0.03495624522015474, "acc_norm": 0.45588235294117646, "acc_norm_stderr": 0.03495624522015474 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4121212121212121, "acc_stderr": 0.03843566993588718, "acc_norm": 0.4121212121212121, "acc_norm_stderr": 0.03843566993588718 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2962056303549572, "mc1_stderr": 0.015983595101811392, "mc2": 0.46200402478414904, "mc2_stderr": 0.015516827306627103 }, "harness|ko_commongen_v2|2": { "acc": 0.39433293978748524, "acc_stderr": 0.016802090674893203, "acc_norm": 0.4769775678866588, "acc_norm_stderr": 0.017172121546727634 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Ja3ck/Mistral-instruct-IPO-Y24-v1", "model_sha": "322906ac8b7dd81de714569db3848eda97d5d40f", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }