{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3225255972696246, "acc_stderr": 0.013659980894277373, "acc_norm": 0.35921501706484643, "acc_norm_stderr": 0.014020224155839155 }, "harness|ko_hellaswag|10": { "acc": 0.3604859589723163, "acc_stderr": 0.004791601975612767, "acc_norm": 0.4561840270862378, "acc_norm_stderr": 0.0049705853282976204 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.47953216374269003, "acc_stderr": 0.0383161053282193, "acc_norm": 0.47953216374269003, "acc_norm_stderr": 0.0383161053282193 }, "harness|ko_mmlu_management|5": { "acc": 0.5339805825242718, "acc_stderr": 0.04939291447273482, "acc_norm": 0.5339805825242718, "acc_norm_stderr": 0.04939291447273482 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4393358876117497, "acc_stderr": 0.017747874245683616, "acc_norm": 0.4393358876117497, "acc_norm_stderr": 0.017747874245683616 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4074074074074074, "acc_stderr": 0.042446332383532286, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.042446332383532286 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3702127659574468, "acc_stderr": 0.03156564682236784, "acc_norm": 0.3702127659574468, "acc_norm_stderr": 0.03156564682236784 }, "harness|ko_mmlu_virology|5": { "acc": 0.42168674698795183, "acc_stderr": 0.03844453181770917, "acc_norm": 0.42168674698795183, "acc_norm_stderr": 0.03844453181770917 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.42443729903536975, "acc_stderr": 0.028071928247946205, "acc_norm": 0.42443729903536975, "acc_norm_stderr": 0.028071928247946205 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.40358744394618834, "acc_stderr": 0.03292802819330315, "acc_norm": 0.40358744394618834, "acc_norm_stderr": 0.03292802819330315 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4351145038167939, "acc_stderr": 0.043482080516448585, "acc_norm": 0.4351145038167939, "acc_norm_stderr": 0.043482080516448585 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.494949494949495, "acc_stderr": 0.035621707606254015, "acc_norm": 0.494949494949495, "acc_norm_stderr": 0.035621707606254015 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4413793103448276, "acc_stderr": 0.041379310344827586, "acc_norm": 0.4413793103448276, "acc_norm_stderr": 0.041379310344827586 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.04336432707993179, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.04336432707993179 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4831932773109244, "acc_stderr": 0.03246013680375308, "acc_norm": 0.4831932773109244, "acc_norm_stderr": 0.03246013680375308 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4717948717948718, "acc_stderr": 0.02531063925493391, "acc_norm": 0.4717948717948718, "acc_norm_stderr": 0.02531063925493391 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.56, "acc_stderr": 0.0498887651569859, "acc_norm": 0.56, "acc_norm_stderr": 0.0498887651569859 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.46296296296296297, "acc_stderr": 0.04820403072760627, "acc_norm": 0.46296296296296297, 
"acc_norm_stderr": 0.04820403072760627 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4039408866995074, "acc_stderr": 0.034524539038220385, "acc_norm": 0.4039408866995074, "acc_norm_stderr": 0.034524539038220385 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4612903225806452, "acc_stderr": 0.02835863485983692, "acc_norm": 0.4612903225806452, "acc_norm_stderr": 0.02835863485983692 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6794871794871795, "acc_stderr": 0.030572811310299607, "acc_norm": 0.6794871794871795, "acc_norm_stderr": 0.030572811310299607 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4, "acc_stderr": 0.030151134457776296, "acc_norm": 0.4, "acc_norm_stderr": 0.030151134457776296 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.44545454545454544, "acc_stderr": 0.04760548821460325, "acc_norm": 0.44545454545454544, "acc_norm_stderr": 0.04760548821460325 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948492, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948492 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6019900497512438, "acc_stderr": 0.03461199429040013, "acc_norm": 0.6019900497512438, "acc_norm_stderr": 0.03461199429040013 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.41040462427745666, "acc_stderr": 0.03750757044895537, "acc_norm": 0.41040462427745666, "acc_norm_stderr": 0.03750757044895537 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.35714285714285715, "acc_stderr": 0.024677862841332783, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.024677862841332783 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3541666666666667, "acc_stderr": 0.039994111357535424, "acc_norm": 0.3541666666666667, "acc_norm_stderr": 0.039994111357535424 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4508670520231214, "acc_stderr": 0.026788811931562753, "acc_norm": 0.4508670520231214, "acc_norm_stderr": 0.026788811931562753 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5214723926380368, "acc_stderr": 0.03924746876751129, "acc_norm": 0.5214723926380368, "acc_norm_stderr": 0.03924746876751129 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.42901234567901236, "acc_stderr": 0.027538925613470867, "acc_norm": 0.42901234567901236, "acc_norm_stderr": 0.027538925613470867 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.45595854922279794, "acc_stderr": 0.03594413711272437, "acc_norm": 0.45595854922279794, "acc_norm_stderr": 0.03594413711272437 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.32456140350877194, "acc_stderr": 0.044045561573747685, "acc_norm": 0.32456140350877194, "acc_norm_stderr": 0.044045561573747685 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.47155963302752296, "acc_stderr": 0.021402615697348044, "acc_norm": 0.47155963302752296, "acc_norm_stderr": 0.021402615697348044 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.3412698412698413, 
"acc_stderr": 0.04240799327574925, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574925 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.43790849673202614, "acc_stderr": 0.028408302020332687, "acc_norm": 0.43790849673202614, "acc_norm_stderr": 0.028408302020332687 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6611570247933884, "acc_stderr": 0.04320767807536669, "acc_norm": 0.6611570247933884, "acc_norm_stderr": 0.04320767807536669 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.34868421052631576, "acc_stderr": 0.038781398887976104, "acc_norm": 0.34868421052631576, "acc_norm_stderr": 0.038781398887976104 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.35130718954248363, "acc_stderr": 0.01931267606578656, "acc_norm": 0.35130718954248363, "acc_norm_stderr": 0.01931267606578656 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3333333333333333, "acc_stderr": 0.02812163604063988, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.02812163604063988 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4305555555555556, "acc_stderr": 0.03376922151252335, "acc_norm": 0.4305555555555556, "acc_norm_stderr": 0.03376922151252335 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.31508379888268156, "acc_stderr": 0.015536850852473638, "acc_norm": 0.31508379888268156, "acc_norm_stderr": 0.015536850852473638 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.63, "acc_stderr": 0.04852365870939098, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939098 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4117647058823529, "acc_stderr": 0.029896163033125474, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.029896163033125474 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4857142857142857, "acc_stderr": 0.031996152328062875, "acc_norm": 0.4857142857142857, "acc_norm_stderr": 0.031996152328062875 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5485232067510548, "acc_stderr": 0.03239360017397471, "acc_norm": 0.5485232067510548, "acc_norm_stderr": 0.03239360017397471 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.30964797913950454, "acc_stderr": 0.011808598262503318, "acc_norm": 0.30964797913950454, "acc_norm_stderr": 0.011808598262503318 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.43137254901960786, "acc_stderr": 0.03476099060501636, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.03476099060501636 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3939393939393939, "acc_stderr": 0.03815494308688929, "acc_norm": 0.3939393939393939, "acc_norm_stderr": 0.03815494308688929 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2974296205630355, "mc1_stderr": 0.016002651487361012, "mc2": 0.4812011290103991, "mc2_stderr": 0.015609821540397258 }, "harness|ko_commongen_v2|2": { "acc": 0.4049586776859504, "acc_stderr": 0.01687694116504561, "acc_norm": 0.49940968122786306, "acc_norm_stderr": 0.017190342123448665 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, 
"harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "NousResearch/Yarn-Mistral-7b-64k", "model_sha": "0273c624561fcecc8e8f4030492a9307aa60f945", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }