{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2226962457337884, "acc_stderr": 0.012158314774829948, "acc_norm": 0.2627986348122867, "acc_norm_stderr": 0.012862523175351331 }, "harness|ko_hellaswag|10": { "acc": 0.2726548496315475, "acc_stderr": 0.004444146875436292, "acc_norm": 0.29635530770762797, "acc_norm_stderr": 0.004557163175885563 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.2982456140350877, "acc_stderr": 0.03508771929824561, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.03508771929824561 }, "harness|ko_mmlu_management|5": { "acc": 0.3106796116504854, "acc_stderr": 0.04582124160161549, "acc_norm": 0.3106796116504854, "acc_norm_stderr": 0.04582124160161549 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.2848020434227331, "acc_stderr": 0.016139174096522553, "acc_norm": 0.2848020434227331, "acc_norm_stderr": 0.016139174096522553 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.25925925925925924, "acc_stderr": 0.03785714465066654, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.03785714465066654 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3276595744680851, "acc_stderr": 0.030683020843231008, "acc_norm": 0.3276595744680851, "acc_norm_stderr": 0.030683020843231008 }, "harness|ko_mmlu_virology|5": { "acc": 0.2891566265060241, "acc_stderr": 0.03529486801511115, "acc_norm": 0.2891566265060241, "acc_norm_stderr": 0.03529486801511115 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3440514469453376, "acc_stderr": 0.026981478043648026, "acc_norm": 0.3440514469453376, "acc_norm_stderr": 0.026981478043648026 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.34977578475336324, "acc_stderr": 0.03200736719484503, "acc_norm": 0.34977578475336324, "acc_norm_stderr": 0.03200736719484503 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2366412213740458, "acc_stderr": 0.037276735755969195, "acc_norm": 0.2366412213740458, "acc_norm_stderr": 0.037276735755969195 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.22727272727272727, "acc_stderr": 0.029857515673386417, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.029857515673386417 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.31724137931034485, "acc_stderr": 0.038783523721386215, "acc_norm": 0.31724137931034485, "acc_norm_stderr": 0.038783523721386215 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.03793281185307811, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.03793281185307811 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.2689075630252101, "acc_stderr": 0.028801392193631276, "acc_norm": 0.2689075630252101, "acc_norm_stderr": 0.028801392193631276 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2512820512820513, "acc_stderr": 0.021992016662370568, "acc_norm": 0.2512820512820513, "acc_norm_stderr": 0.021992016662370568 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.3611111111111111, "acc_stderr": 0.04643454608906275, "acc_norm": 
0.3611111111111111, "acc_norm_stderr": 0.04643454608906275 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.26108374384236455, "acc_stderr": 0.030903796952114454, "acc_norm": 0.26108374384236455, "acc_norm_stderr": 0.030903796952114454 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.2709677419354839, "acc_stderr": 0.02528441611490016, "acc_norm": 0.2709677419354839, "acc_norm_stderr": 0.02528441611490016 }, "harness|ko_mmlu_marketing|5": { "acc": 0.3418803418803419, "acc_stderr": 0.03107502852650775, "acc_norm": 0.3418803418803419, "acc_norm_stderr": 0.03107502852650775 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2943396226415094, "acc_stderr": 0.028049186315695248, "acc_norm": 0.2943396226415094, "acc_norm_stderr": 0.028049186315695248 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.3181818181818182, "acc_stderr": 0.044612721759105065, "acc_norm": 0.3181818181818182, "acc_norm_stderr": 0.044612721759105065 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.027840811495871927, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.027840811495871927 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2582781456953642, "acc_stderr": 0.035737053147634576, "acc_norm": 0.2582781456953642, "acc_norm_stderr": 0.035737053147634576 }, "harness|ko_mmlu_sociology|5": { "acc": 0.38308457711442784, "acc_stderr": 0.034375193373382504, "acc_norm": 0.38308457711442784, "acc_norm_stderr": 0.034375193373382504 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.24855491329479767, "acc_stderr": 0.03295304696818318, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.03295304696818318 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2724867724867725, "acc_stderr": 0.022930973071633345, "acc_norm": 0.2724867724867725, "acc_norm_stderr": 0.022930973071633345 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.315028901734104, "acc_stderr": 0.025009313790069692, "acc_norm": 0.315028901734104, "acc_norm_stderr": 0.025009313790069692 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3006134969325153, "acc_stderr": 0.03602511318806771, "acc_norm": 0.3006134969325153, "acc_norm_stderr": 0.03602511318806771 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.30864197530864196, "acc_stderr": 0.02570264026060375, "acc_norm": 0.30864197530864196, "acc_norm_stderr": 0.02570264026060375 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.3160621761658031, "acc_stderr": 0.03355397369686173, "acc_norm": 0.3160621761658031, "acc_norm_stderr": 0.03355397369686173 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.041857744240220575, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.041857744240220575 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.23853211009174313, "acc_stderr": 0.01827257581023186, "acc_norm": 0.23853211009174313, "acc_norm_stderr": 0.01827257581023186 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.2908496732026144, "acc_stderr": 0.026004800363952113, "acc_norm": 0.2908496732026144, "acc_norm_stderr": 0.026004800363952113 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816508, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816508 }, "harness|ko_mmlu_international_law|5": { "acc": 0.512396694214876, "acc_stderr": 0.04562951548180765, "acc_norm": 0.512396694214876, "acc_norm_stderr": 0.04562951548180765 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.2236842105263158, "acc_stderr": 0.03391160934343604, "acc_norm": 0.2236842105263158, "acc_norm_stderr": 0.03391160934343604 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2696078431372549, "acc_stderr": 0.017952449196987866, "acc_norm": 0.2696078431372549, "acc_norm_stderr": 0.017952449196987866 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.26595744680851063, "acc_stderr": 0.026358065698880585, "acc_norm": 0.26595744680851063, "acc_norm_stderr": 0.026358065698880585 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.29464285714285715, "acc_stderr": 0.043270409325787296, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.043270409325787296 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.19907407407407407, "acc_stderr": 0.027232298462690218, "acc_norm": 0.19907407407407407, "acc_norm_stderr": 0.027232298462690218 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.22058823529411764, "acc_stderr": 0.02518778666022727, "acc_norm": 0.22058823529411764, "acc_norm_stderr": 0.02518778666022727 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.20408163265306123, "acc_stderr": 0.025801283475090496, "acc_norm": 0.20408163265306123, "acc_norm_stderr": 0.025801283475090496 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.3037974683544304, "acc_stderr": 0.029936696387138598, "acc_norm": 0.3037974683544304, "acc_norm_stderr": 0.029936696387138598 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.24837027379400262, "acc_stderr": 0.011035212598034494, "acc_norm": 0.24837027379400262, "acc_norm_stderr": 0.011035212598034494 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.24019607843137256, "acc_stderr": 0.02998373305591362, "acc_norm": 0.24019607843137256, "acc_norm_stderr": 0.02998373305591362 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.24242424242424243, "acc_stderr": 0.033464098810559534, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.033464098810559534 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2594859241126071, "mc1_stderr": 0.015345409485557966, "mc2": 0.43443146146429873, "mc2_stderr": 0.01580310882533787 }, "harness|ko_commongen_v2|2": { "acc": 0.11452184179456906, "acc_stderr": 0.010948330698808925, "acc_norm": 0.1959858323494687, "acc_norm_stderr": 0.013647685567768858 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "jb723/llama2-ko-7B-model", "model_sha": "24e455bbf4039f360a37833583c335582d2c6030", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }