{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.33447098976109213,
            "acc_stderr": 0.013787460322441387,
            "acc_norm": 0.3848122866894198,
            "acc_norm_stderr": 0.0142183710652511
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3836885082652858,
            "acc_stderr": 0.0048528966817367606,
            "acc_norm": 0.4970125473013344,
            "acc_norm_stderr": 0.004989692344313999
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.29239766081871343,
            "acc_stderr": 0.03488647713457922,
            "acc_norm": 0.29239766081871343,
            "acc_norm_stderr": 0.03488647713457922
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.21359223300970873,
            "acc_stderr": 0.04058042015646034,
            "acc_norm": 0.21359223300970873,
            "acc_norm_stderr": 0.04058042015646034
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.31417624521072796,
            "acc_stderr": 0.01659929173588491,
            "acc_norm": 0.31417624521072796,
            "acc_norm_stderr": 0.01659929173588491
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.04171654161354543,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.04171654161354543
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.35319148936170214,
            "acc_stderr": 0.031245325202761926,
            "acc_norm": 0.35319148936170214,
            "acc_norm_stderr": 0.031245325202761926
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.28313253012048195,
            "acc_stderr": 0.03507295431370519,
            "acc_norm": 0.28313253012048195,
            "acc_norm_stderr": 0.03507295431370519
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2958199356913183,
            "acc_stderr": 0.025922371788818784,
            "acc_norm": 0.2958199356913183,
            "acc_norm_stderr": 0.025922371788818784
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.30493273542600896,
            "acc_stderr": 0.030898610882477515,
            "acc_norm": 0.30493273542600896,
            "acc_norm_stderr": 0.030898610882477515
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2900763358778626,
            "acc_stderr": 0.03980066246467765,
            "acc_norm": 0.2900763358778626,
            "acc_norm_stderr": 0.03980066246467765
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252604,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252604
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.29797979797979796,
            "acc_stderr": 0.03258630383836554,
            "acc_norm": 0.29797979797979796,
            "acc_norm_stderr": 0.03258630383836554
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3724137931034483,
            "acc_stderr": 0.0402873153294756,
            "acc_norm": 0.3724137931034483,
            "acc_norm_stderr": 0.0402873153294756
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.04336432707993179,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.04336432707993179
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.31512605042016806,
            "acc_stderr": 0.030176808288974337,
            "acc_norm": 0.31512605042016806,
            "acc_norm_stderr": 0.030176808288974337
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.23076923076923078,
            "acc_stderr": 0.02136202772522273,
            "acc_norm": 0.23076923076923078,
            "acc_norm_stderr": 0.02136202772522273
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.17,
            "acc_stderr": 0.0377525168068637,
            "acc_norm": 0.17,
            "acc_norm_stderr": 0.0377525168068637
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.23148148148148148,
            "acc_stderr": 0.04077494709252626,
            "acc_norm": 0.23148148148148148,
            "acc_norm_stderr": 0.04077494709252626
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2413793103448276,
            "acc_stderr": 0.030108330718011625,
            "acc_norm": 0.2413793103448276,
            "acc_norm_stderr": 0.030108330718011625
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3193548387096774,
            "acc_stderr": 0.02652270967466777,
            "acc_norm": 0.3193548387096774,
            "acc_norm_stderr": 0.02652270967466777
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.031937057262002924,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.031937057262002924
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.22264150943396227,
            "acc_stderr": 0.02560423347089909,
            "acc_norm": 0.22264150943396227,
            "acc_norm_stderr": 0.02560423347089909
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.24545454545454545,
            "acc_stderr": 0.041220665028782834,
            "acc_norm": 0.24545454545454545,
            "acc_norm_stderr": 0.041220665028782834
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.29259259259259257,
            "acc_stderr": 0.02773896963217609,
            "acc_norm": 0.29259259259259257,
            "acc_norm_stderr": 0.02773896963217609
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2582781456953642,
            "acc_stderr": 0.035737053147634576,
            "acc_norm": 0.2582781456953642,
            "acc_norm_stderr": 0.035737053147634576
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.31840796019900497,
            "acc_stderr": 0.032941184790540944,
            "acc_norm": 0.31840796019900497,
            "acc_norm_stderr": 0.032941184790540944
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2832369942196532,
            "acc_stderr": 0.03435568056047876,
            "acc_norm": 0.2832369942196532,
            "acc_norm_stderr": 0.03435568056047876
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2804232804232804,
            "acc_stderr": 0.02313528797432563,
            "acc_norm": 0.2804232804232804,
            "acc_norm_stderr": 0.02313528797432563
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.25,
            "acc_stderr": 0.03621034121889507,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.03621034121889507
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.2,
            "acc_stderr": 0.04020151261036846,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.04020151261036846
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720683,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720683
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.30346820809248554,
            "acc_stderr": 0.02475241196091721,
            "acc_norm": 0.30346820809248554,
            "acc_norm_stderr": 0.02475241196091721
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3006134969325153,
            "acc_stderr": 0.03602511318806771,
            "acc_norm": 0.3006134969325153,
            "acc_norm_stderr": 0.03602511318806771
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3117283950617284,
            "acc_stderr": 0.025773111169630443,
            "acc_norm": 0.3117283950617284,
            "acc_norm_stderr": 0.025773111169630443
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.22797927461139897,
            "acc_stderr": 0.030276909945178256,
            "acc_norm": 0.22797927461139897,
            "acc_norm_stderr": 0.030276909945178256
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.21052631578947367,
            "acc_stderr": 0.038351539543994194,
            "acc_norm": 0.21052631578947367,
            "acc_norm_stderr": 0.038351539543994194
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.24403669724770644,
            "acc_stderr": 0.018415286351416416,
            "acc_norm": 0.24403669724770644,
            "acc_norm_stderr": 0.018415286351416416
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.03718489006818114,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.03718489006818114
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3104575163398693,
            "acc_stderr": 0.02649303322514589,
            "acc_norm": 0.3104575163398693,
            "acc_norm_stderr": 0.02649303322514589
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252606,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252606
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.371900826446281,
            "acc_stderr": 0.044120158066245044,
            "acc_norm": 0.371900826446281,
            "acc_norm_stderr": 0.044120158066245044
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.34210526315789475,
            "acc_stderr": 0.03860731599316091,
            "acc_norm": 0.34210526315789475,
            "acc_norm_stderr": 0.03860731599316091
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.26143790849673204,
            "acc_stderr": 0.017776947157528037,
            "acc_norm": 0.26143790849673204,
            "acc_norm_stderr": 0.017776947157528037
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.26595744680851063,
            "acc_stderr": 0.026358065698880592,
            "acc_norm": 0.26595744680851063,
            "acc_norm_stderr": 0.026358065698880592
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.03894641120044792,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.03894641120044792
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.030546745264953185,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.030546745264953185
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27262569832402234,
            "acc_stderr": 0.014893391735249608,
            "acc_norm": 0.27262569832402234,
            "acc_norm_stderr": 0.014893391735249608
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421296,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421296
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.16176470588235295,
            "acc_stderr": 0.02236867256288675,
            "acc_norm": 0.16176470588235295,
            "acc_norm_stderr": 0.02236867256288675
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2693877551020408,
            "acc_stderr": 0.02840125202902294,
            "acc_norm": 0.2693877551020408,
            "acc_norm_stderr": 0.02840125202902294
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.26582278481012656,
            "acc_stderr": 0.028756799629658332,
            "acc_norm": 0.26582278481012656,
            "acc_norm_stderr": 0.028756799629658332
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2620599739243807,
            "acc_stderr": 0.011231552795890394,
            "acc_norm": 0.2620599739243807,
            "acc_norm_stderr": 0.011231552795890394
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.25980392156862747,
            "acc_stderr": 0.030778554678693264,
            "acc_norm": 0.25980392156862747,
            "acc_norm_stderr": 0.030778554678693264
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3151515151515151,
            "acc_stderr": 0.0362773057502241,
            "acc_norm": 0.3151515151515151,
            "acc_norm_stderr": 0.0362773057502241
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.24724602203182375,
            "mc1_stderr": 0.015102404797359649,
            "mc2": 0.3953129040998704,
            "mc2_stderr": 0.015062425593708578
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.34976525821596244,
            "acc_stderr": 0.016347774542860783,
            "acc_norm": 0.5117370892018779,
            "acc_norm_stderr": 0.017135056277338974
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "hyunseoki/ko-ref-llama2-7b",
        "model_sha": "1ee08c79ae7393473754b77e82b1472ef63d5dd2",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}