{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3984641638225256, "acc_stderr": 0.014306946052735565, "acc_norm": 0.45819112627986347, "acc_norm_stderr": 0.0145602203087147 }, "harness|ko_hellaswag|10": { "acc": 0.4231228838876718, "acc_stderr": 0.004930448527146665, "acc_norm": 0.5640310695080661, "acc_norm_stderr": 0.004948696280312416 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4853801169590643, "acc_stderr": 0.038331852752130205, "acc_norm": 0.4853801169590643, "acc_norm_stderr": 0.038331852752130205 }, "harness|ko_mmlu_management|5": { "acc": 0.5339805825242718, "acc_stderr": 0.0493929144727348, "acc_norm": 0.5339805825242718, "acc_norm_stderr": 0.0493929144727348 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5542784163473818, "acc_stderr": 0.017774297282479503, "acc_norm": 0.5542784163473818, "acc_norm_stderr": 0.017774297282479503 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.45185185185185184, "acc_stderr": 0.04299268905480863, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.04299268905480863 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.40425531914893614, "acc_stderr": 0.03208115750788684, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.03208115750788684 }, "harness|ko_mmlu_virology|5": { "acc": 0.43373493975903615, "acc_stderr": 0.03858158940685515, "acc_norm": 0.43373493975903615, "acc_norm_stderr": 0.03858158940685515 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5112540192926045, "acc_stderr": 0.028390897396863533, "acc_norm": 0.5112540192926045, "acc_norm_stderr": 0.028390897396863533 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.515695067264574, "acc_stderr": 0.0335412657542081, "acc_norm": 0.515695067264574, "acc_norm_stderr": 0.0335412657542081 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4580152671755725, "acc_stderr": 0.04369802690578757, "acc_norm": 0.4580152671755725, "acc_norm_stderr": 0.04369802690578757 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.45, "acc_stderr": 0.04999999999999999, "acc_norm": 0.45, "acc_norm_stderr": 0.04999999999999999 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5707070707070707, "acc_stderr": 0.03526552724601198, "acc_norm": 0.5707070707070707, "acc_norm_stderr": 0.03526552724601198 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3931034482758621, "acc_stderr": 0.040703290137070705, "acc_norm": 0.3931034482758621, "acc_norm_stderr": 0.040703290137070705 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.04280105837364395, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364395 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.46218487394957986, "acc_stderr": 0.032385469487589795, "acc_norm": 0.46218487394957986, "acc_norm_stderr": 0.032385469487589795 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4358974358974359, "acc_stderr": 0.0251418015111775, "acc_norm": 0.4358974358974359, "acc_norm_stderr": 0.0251418015111775 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.55, "acc_stderr": 0.04999999999999999, "acc_norm": 0.55, "acc_norm_stderr": 0.04999999999999999 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.49074074074074076, "acc_stderr": 0.04832853553437055, "acc_norm": 0.49074074074074076, 
"acc_norm_stderr": 0.04832853553437055 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4187192118226601, "acc_stderr": 0.03471192860518468, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.03471192860518468 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4870967741935484, "acc_stderr": 0.028434533152681848, "acc_norm": 0.4870967741935484, "acc_norm_stderr": 0.028434533152681848 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6666666666666666, "acc_stderr": 0.03088273697413866, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.03088273697413866 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.47547169811320755, "acc_stderr": 0.030735822206205608, "acc_norm": 0.47547169811320755, "acc_norm_stderr": 0.030735822206205608 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5545454545454546, "acc_stderr": 0.047605488214603246, "acc_norm": 0.5545454545454546, "acc_norm_stderr": 0.047605488214603246 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.25555555555555554, "acc_stderr": 0.026593939101844065, "acc_norm": 0.25555555555555554, "acc_norm_stderr": 0.026593939101844065 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2847682119205298, "acc_stderr": 0.03684881521389024, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389024 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5870646766169154, "acc_stderr": 0.03481520803367348, "acc_norm": 0.5870646766169154, "acc_norm_stderr": 0.03481520803367348 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.4046242774566474, "acc_stderr": 0.0374246119388725, "acc_norm": 0.4046242774566474, "acc_norm_stderr": 0.0374246119388725 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2724867724867725, "acc_stderr": 0.02293097307163335, "acc_norm": 0.2724867724867725, "acc_norm_stderr": 0.02293097307163335 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3263888888888889, "acc_stderr": 0.03921067198982266, "acc_norm": 0.3263888888888889, "acc_norm_stderr": 0.03921067198982266 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5144508670520231, "acc_stderr": 0.026907849856282532, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.026907849856282532 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.50920245398773, "acc_stderr": 0.03927705600787443, "acc_norm": 0.50920245398773, "acc_norm_stderr": 0.03927705600787443 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.49382716049382713, "acc_stderr": 0.02781862396258329, "acc_norm": 0.49382716049382713, "acc_norm_stderr": 0.02781862396258329 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5854922279792746, "acc_stderr": 0.035553003195576686, "acc_norm": 0.5854922279792746, "acc_norm_stderr": 0.035553003195576686 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.040969851398436716, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436716 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5706422018348624, "acc_stderr": 0.021222286397236508, "acc_norm": 0.5706422018348624, "acc_norm_stderr": 0.021222286397236508 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.29365079365079366, "acc_stderr": 0.04073524322147127, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.04073524322147127 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3954248366013072, "acc_stderr": 0.027996723180631466, "acc_norm": 0.3954248366013072, "acc_norm_stderr": 0.027996723180631466 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6528925619834711, "acc_stderr": 0.043457245702925335, "acc_norm": 0.6528925619834711, "acc_norm_stderr": 0.043457245702925335 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.40131578947368424, "acc_stderr": 0.03988903703336284, "acc_norm": 0.40131578947368424, "acc_norm_stderr": 0.03988903703336284 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.37745098039215685, "acc_stderr": 0.019610851474880283, "acc_norm": 0.37745098039215685, "acc_norm_stderr": 0.019610851474880283 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3617021276595745, "acc_stderr": 0.028663820147199492, "acc_norm": 0.3617021276595745, "acc_norm_stderr": 0.028663820147199492 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.24107142857142858, "acc_stderr": 0.04059867246952687, "acc_norm": 0.24107142857142858, "acc_norm_stderr": 0.04059867246952687 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.03167468706828979, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.03167468706828979 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4007352941176471, "acc_stderr": 0.029768263528933105, "acc_norm": 0.4007352941176471, "acc_norm_stderr": 0.029768263528933105 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.49795918367346936, "acc_stderr": 0.0320089533497105, "acc_norm": 0.49795918367346936, "acc_norm_stderr": 0.0320089533497105 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5991561181434599, "acc_stderr": 0.031900803894732356, "acc_norm": 0.5991561181434599, "acc_norm_stderr": 0.031900803894732356 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.34028683181225555, "acc_stderr": 0.012101217610223798, "acc_norm": 0.34028683181225555, "acc_norm_stderr": 0.012101217610223798 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5245098039215687, "acc_stderr": 0.035050931943487976, "acc_norm": 0.5245098039215687, "acc_norm_stderr": 0.035050931943487976 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.49696969696969695, "acc_stderr": 0.03904272341431856, "acc_norm": 0.49696969696969695, "acc_norm_stderr": 0.03904272341431856 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.3011015911872705, "mc1_stderr": 0.01605899902610062, "mc2": 0.45367177115043, "mc2_stderr": 0.015134250403335572 }, "harness|ko_commongen_v2|2": { "acc": 0.43919716646989376, "acc_stderr": 0.0170627757447807, "acc_norm": 0.5466351829988194, "acc_norm_stderr": 0.01711541822522687 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Kaeri-Jenti/Llama-2-kor-13B", "model_sha": "de4f458a28b96221babb7655c994221ea3d27c6f", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }