{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3361774744027304, "acc_stderr": 0.013804855026205761, "acc_norm": 0.38139931740614336, "acc_norm_stderr": 0.014194389086685261 }, "harness|ko_hellaswag|10": { "acc": 0.3724357697669787, "acc_stderr": 0.004824655406075561, "acc_norm": 0.48078072097191793, "acc_norm_stderr": 0.004986093791041656 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.45614035087719296, "acc_stderr": 0.03820042586602966, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.03820042586602966 }, "harness|ko_mmlu_management|5": { "acc": 0.49514563106796117, "acc_stderr": 0.049505043821289195, "acc_norm": 0.49514563106796117, "acc_norm_stderr": 0.049505043821289195 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4891443167305236, "acc_stderr": 0.017875748840242414, "acc_norm": 0.4891443167305236, "acc_norm_stderr": 0.017875748840242414 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.042039210401562783 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3574468085106383, "acc_stderr": 0.03132941789476425, "acc_norm": 0.3574468085106383, "acc_norm_stderr": 0.03132941789476425 }, "harness|ko_mmlu_virology|5": { "acc": 0.35542168674698793, "acc_stderr": 0.037262143543224144, "acc_norm": 0.35542168674698793, "acc_norm_stderr": 0.037262143543224144 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4340836012861736, "acc_stderr": 0.0281502322445356, "acc_norm": 0.4340836012861736, "acc_norm_stderr": 0.0281502322445356 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.3811659192825112, "acc_stderr": 0.03259625118416827, "acc_norm": 0.3811659192825112, "acc_norm_stderr": 0.03259625118416827 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4732824427480916, "acc_stderr": 0.04379024936553894, "acc_norm": 0.4732824427480916, "acc_norm_stderr": 0.04379024936553894 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.35, "acc_stderr": 0.04793724854411022, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411022 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5050505050505051, "acc_stderr": 0.035621707606254015, "acc_norm": 0.5050505050505051, "acc_norm_stderr": 0.035621707606254015 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.41379310344827586, "acc_stderr": 0.041042692118062316, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.041042692118062316 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.0379328118530781, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.0379328118530781 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3697478991596639, "acc_stderr": 0.031357095996135904, "acc_norm": 0.3697478991596639, "acc_norm_stderr": 0.031357095996135904 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.36153846153846153, "acc_stderr": 0.02435958146539696, "acc_norm": 0.36153846153846153, "acc_norm_stderr": 0.02435958146539696 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4722222222222222, "acc_stderr": 0.04826217294139894, "acc_norm": 
0.4722222222222222, "acc_norm_stderr": 0.04826217294139894 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.37438423645320196, "acc_stderr": 0.03405155380561952, "acc_norm": 0.37438423645320196, "acc_norm_stderr": 0.03405155380561952 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.44516129032258067, "acc_stderr": 0.02827241018621491, "acc_norm": 0.44516129032258067, "acc_norm_stderr": 0.02827241018621491 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6196581196581197, "acc_stderr": 0.03180425204384099, "acc_norm": 0.6196581196581197, "acc_norm_stderr": 0.03180425204384099 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3622641509433962, "acc_stderr": 0.029582245128384296, "acc_norm": 0.3622641509433962, "acc_norm_stderr": 0.029582245128384296 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.41818181818181815, "acc_stderr": 0.04724577405731571, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.04724577405731571 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.29259259259259257, "acc_stderr": 0.02773896963217609, "acc_norm": 0.29259259259259257, "acc_norm_stderr": 0.02773896963217609 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.037579499229433426, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.037579499229433426 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5174129353233831, "acc_stderr": 0.03533389234739245, "acc_norm": 0.5174129353233831, "acc_norm_stderr": 0.03533389234739245 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.32947976878612717, "acc_stderr": 0.03583901754736412, "acc_norm": 0.32947976878612717, "acc_norm_stderr": 0.03583901754736412 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.30423280423280424, "acc_stderr": 0.023695415009463087, "acc_norm": 0.30423280423280424, "acc_norm_stderr": 0.023695415009463087 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3472222222222222, "acc_stderr": 0.039812405437178615, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.039812405437178615 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.47398843930635837, "acc_stderr": 0.02688264343402289, "acc_norm": 0.47398843930635837, "acc_norm_stderr": 0.02688264343402289 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3803680981595092, "acc_stderr": 0.038142698932618374, "acc_norm": 0.3803680981595092, "acc_norm_stderr": 0.038142698932618374 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.39814814814814814, "acc_stderr": 0.027237415094592474, "acc_norm": 0.39814814814814814, "acc_norm_stderr": 0.027237415094592474 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.43523316062176165, "acc_stderr": 0.03578038165008586, "acc_norm": 0.43523316062176165, "acc_norm_stderr": 0.03578038165008586 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022057, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022057 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3963302752293578, "acc_stderr": 0.02097146994790053, "acc_norm": 0.3963302752293578, "acc_norm_stderr": 0.02097146994790053 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3888888888888889, "acc_stderr": 0.02791405551046802, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.02791405551046802 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6115702479338843, "acc_stderr": 0.04449270350068383, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.04449270350068383 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4342105263157895, "acc_stderr": 0.04033565667848319, "acc_norm": 0.4342105263157895, "acc_norm_stderr": 0.04033565667848319 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.31862745098039214, "acc_stderr": 0.01885008469646872, "acc_norm": 0.31862745098039214, "acc_norm_stderr": 0.01885008469646872 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3120567375886525, "acc_stderr": 0.027640120545169945, "acc_norm": 0.3120567375886525, "acc_norm_stderr": 0.027640120545169945 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.25892857142857145, "acc_stderr": 0.041577515398656284, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.041577515398656284 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2824074074074074, "acc_stderr": 0.03070137211151092, "acc_norm": 0.2824074074074074, "acc_norm_stderr": 0.03070137211151092 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.01435591196476786, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.01435591196476786 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.29411764705882354, "acc_stderr": 0.027678468642144686, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.027678468642144686 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4163265306122449, "acc_stderr": 0.031557828165561644, "acc_norm": 0.4163265306122449, "acc_norm_stderr": 0.031557828165561644 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.4936708860759494, "acc_stderr": 0.032544620107678585, "acc_norm": 0.4936708860759494, "acc_norm_stderr": 0.032544620107678585 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.30182529335071706, "acc_stderr": 0.01172435051810589, "acc_norm": 0.30182529335071706, "acc_norm_stderr": 0.01172435051810589 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.37745098039215685, "acc_stderr": 0.03402272044340704, "acc_norm": 0.37745098039215685, "acc_norm_stderr": 0.03402272044340704 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.503030303030303, "acc_stderr": 0.03904272341431856, "acc_norm": 0.503030303030303, "acc_norm_stderr": 0.03904272341431856 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2692778457772338, "mc1_stderr": 0.015528566637087307, "mc2": 0.42195295057052135, "mc2_stderr": 0.015423294021851608 }, "harness|ko_commongen_v2|2": { "acc": 0.3317591499409681, "acc_stderr": 0.016187984642157312, "acc_norm": 0.3955135773317591, "acc_norm_stderr": 0.01681081590220604 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "AIFT/PACK-13b-v1.1", "model_sha": "a547563032d1b762d80a80959f9b00aefab44eb5", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }