{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3378839590443686, "acc_stderr": 0.01382204792228351, "acc_norm": 0.37542662116040953, "acc_norm_stderr": 0.01415063143511173 }, "harness|ko_hellaswag|10": { "acc": 0.37044413463453496, "acc_stderr": 0.004819367172685971, "acc_norm": 0.4788886675960964, "acc_norm_stderr": 0.004985331652408348 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.45614035087719296, "acc_stderr": 0.03820042586602967, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.03820042586602967 }, "harness|ko_mmlu_management|5": { "acc": 0.5048543689320388, "acc_stderr": 0.04950504382128921, "acc_norm": 0.5048543689320388, "acc_norm_stderr": 0.04950504382128921 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.49169859514687103, "acc_stderr": 0.017877498991072008, "acc_norm": 0.49169859514687103, "acc_norm_stderr": 0.017877498991072008 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.362962962962963, "acc_stderr": 0.041539484047424, "acc_norm": 0.362962962962963, "acc_norm_stderr": 0.041539484047424 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.35319148936170214, "acc_stderr": 0.031245325202761926, "acc_norm": 0.35319148936170214, "acc_norm_stderr": 0.031245325202761926 }, "harness|ko_mmlu_virology|5": { "acc": 0.39759036144578314, "acc_stderr": 0.038099730845402184, "acc_norm": 0.39759036144578314, "acc_norm_stderr": 0.038099730845402184 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4340836012861736, "acc_stderr": 0.0281502322445356, "acc_norm": 0.4340836012861736, "acc_norm_stderr": 0.0281502322445356 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4125560538116592, "acc_stderr": 0.03304062175449297, "acc_norm": 0.4125560538116592, "acc_norm_stderr": 0.03304062175449297 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.46564885496183206, "acc_stderr": 0.04374928560599738, "acc_norm": 0.46564885496183206, "acc_norm_stderr": 0.04374928560599738 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5151515151515151, "acc_stderr": 0.0356071651653106, "acc_norm": 0.5151515151515151, "acc_norm_stderr": 0.0356071651653106 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3931034482758621, "acc_stderr": 0.040703290137070705, "acc_norm": 0.3931034482758621, "acc_norm_stderr": 0.040703290137070705 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.18627450980392157, "acc_stderr": 0.038739587141493524, "acc_norm": 0.18627450980392157, "acc_norm_stderr": 0.038739587141493524 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.37815126050420167, "acc_stderr": 0.031499305777849054, "acc_norm": 0.37815126050420167, "acc_norm_stderr": 0.031499305777849054 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3769230769230769, "acc_stderr": 0.024570975364225995, "acc_norm": 0.3769230769230769, "acc_norm_stderr": 0.024570975364225995 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.49074074074074076, "acc_stderr": 0.04832853553437055, "acc_norm": 0.49074074074074076, 
"acc_norm_stderr": 0.04832853553437055 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3793103448275862, "acc_stderr": 0.03413963805906235, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.03413963805906235 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.45161290322580644, "acc_stderr": 0.02831050034856839, "acc_norm": 0.45161290322580644, "acc_norm_stderr": 0.02831050034856839 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6025641025641025, "acc_stderr": 0.03205953453789293, "acc_norm": 0.6025641025641025, "acc_norm_stderr": 0.03205953453789293 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.39622641509433965, "acc_stderr": 0.03010279378179119, "acc_norm": 0.39622641509433965, "acc_norm_stderr": 0.03010279378179119 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.42727272727272725, "acc_stderr": 0.04738198703545483, "acc_norm": 0.42727272727272725, "acc_norm_stderr": 0.04738198703545483 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085626, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085626 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5373134328358209, "acc_stderr": 0.035256751674679745, "acc_norm": 0.5373134328358209, "acc_norm_stderr": 0.035256751674679745 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3352601156069364, "acc_stderr": 0.03599586301247078, "acc_norm": 0.3352601156069364, "acc_norm_stderr": 0.03599586301247078 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.30687830687830686, "acc_stderr": 0.02375292871211213, "acc_norm": 0.30687830687830686, "acc_norm_stderr": 0.02375292871211213 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3472222222222222, "acc_stderr": 0.039812405437178615, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.039812405437178615 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.47398843930635837, "acc_stderr": 0.02688264343402289, "acc_norm": 0.47398843930635837, "acc_norm_stderr": 0.02688264343402289 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.37423312883435583, "acc_stderr": 0.03802068102899615, "acc_norm": 0.37423312883435583, "acc_norm_stderr": 0.03802068102899615 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.3950617283950617, "acc_stderr": 0.02720111766692566, "acc_norm": 0.3950617283950617, "acc_norm_stderr": 0.02720111766692566 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.41968911917098445, "acc_stderr": 0.035615873276858834, "acc_norm": 0.41968911917098445, "acc_norm_stderr": 0.035615873276858834 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.04049339297748142, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748142 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3963302752293578, "acc_stderr": 0.020971469947900525, "acc_norm": 0.3963302752293578, "acc_norm_stderr": 0.020971469947900525 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4084967320261438, "acc_stderr": 0.028146405993096358, "acc_norm": 0.4084967320261438, "acc_norm_stderr": 0.028146405993096358 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6446280991735537, "acc_stderr": 0.0436923632657398, "acc_norm": 0.6446280991735537, "acc_norm_stderr": 0.0436923632657398 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4473684210526316, "acc_stderr": 0.04046336883978251, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.04046336883978251 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.31699346405228757, "acc_stderr": 0.018824219512706207, "acc_norm": 0.31699346405228757, "acc_norm_stderr": 0.018824219512706207 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.32269503546099293, "acc_stderr": 0.027889139300534778, "acc_norm": 0.32269503546099293, "acc_norm_stderr": 0.027889139300534778 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340456, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340456 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2638888888888889, "acc_stderr": 0.030058202704309846, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.030058202704309846 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.29044117647058826, "acc_stderr": 0.027576468622740505, "acc_norm": 0.29044117647058826, "acc_norm_stderr": 0.027576468622740505 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4163265306122449, "acc_stderr": 0.031557828165561644, "acc_norm": 0.4163265306122449, "acc_norm_stderr": 0.031557828165561644 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5358649789029536, "acc_stderr": 0.03246338898055659, "acc_norm": 0.5358649789029536, "acc_norm_stderr": 0.03246338898055659 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3200782268578879, "acc_stderr": 0.011914791947638522, "acc_norm": 0.3200782268578879, "acc_norm_stderr": 0.011914791947638522 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.38235294117647056, "acc_stderr": 0.034107853389047184, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.034107853389047184 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4909090909090909, "acc_stderr": 0.03903698647748441, "acc_norm": 0.4909090909090909, "acc_norm_stderr": 0.03903698647748441 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2692778457772338, "mc1_stderr": 0.015528566637087312, "mc2": 0.4274629100267272, "mc2_stderr": 0.015462888327553083 }, "harness|ko_commongen_v2|2": { "acc": 0.3482880755608028, "acc_stderr": 0.016379926739148044, "acc_norm": 0.4132231404958678, "acc_norm_stderr": 0.016929480234495232 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "AIFT/PACK-13b-v1.0", "model_sha": "27f7b1eb3d926034aa90feb9ebc31788182046dd", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }