{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.21416382252559726,
            "acc_stderr": 0.011988383205966496,
            "acc_norm": 0.257679180887372,
            "acc_norm_stderr": 0.012780770562768409
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.2524397530372436,
            "acc_stderr": 0.004335243434486834,
            "acc_norm": 0.25323640709022105,
            "acc_norm_stderr": 0.004339764434219064
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.30994152046783624,
            "acc_stderr": 0.03546976959393163,
            "acc_norm": 0.30994152046783624,
            "acc_norm_stderr": 0.03546976959393163
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.22330097087378642,
            "acc_stderr": 0.04123553189891431,
            "acc_norm": 0.22330097087378642,
            "acc_norm_stderr": 0.04123553189891431
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.24265644955300128,
            "acc_stderr": 0.015329888940899894,
            "acc_norm": 0.24265644955300128,
            "acc_norm_stderr": 0.015329888940899894
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.28888888888888886,
            "acc_stderr": 0.03915450630414251,
            "acc_norm": 0.28888888888888886,
            "acc_norm_stderr": 0.03915450630414251
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2170212765957447,
            "acc_stderr": 0.026947483121496228,
            "acc_norm": 0.2170212765957447,
            "acc_norm_stderr": 0.026947483121496228
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.2289156626506024,
            "acc_stderr": 0.03270745277352477,
            "acc_norm": 0.2289156626506024,
            "acc_norm_stderr": 0.03270745277352477
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2829581993569132,
            "acc_stderr": 0.025583062489984824,
            "acc_norm": 0.2829581993569132,
            "acc_norm_stderr": 0.025583062489984824
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.21973094170403587,
            "acc_stderr": 0.027790177064383602,
            "acc_norm": 0.21973094170403587,
            "acc_norm_stderr": 0.027790177064383602
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.20610687022900764,
            "acc_stderr": 0.03547771004159462,
            "acc_norm": 0.20610687022900764,
            "acc_norm_stderr": 0.03547771004159462
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.25252525252525254,
            "acc_stderr": 0.030954055470365914,
            "acc_norm": 0.25252525252525254,
            "acc_norm_stderr": 0.030954055470365914
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2896551724137931,
            "acc_stderr": 0.03780019230438014,
            "acc_norm": 0.2896551724137931,
            "acc_norm_stderr": 0.03780019230438014
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.04336432707993177,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.04336432707993177
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.22268907563025211,
            "acc_stderr": 0.027025433498882392,
            "acc_norm": 0.22268907563025211,
            "acc_norm_stderr": 0.027025433498882392
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2564102564102564,
            "acc_stderr": 0.022139081103971545,
            "acc_norm": 0.2564102564102564,
            "acc_norm_stderr": 0.022139081103971545
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.2962962962962963,
            "acc_stderr": 0.04414343666854932,
            "acc_norm": 0.2962962962962963,
            "acc_norm_stderr": 0.04414343666854932
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.22167487684729065,
            "acc_stderr": 0.029225575892489614,
            "acc_norm": 0.22167487684729065,
            "acc_norm_stderr": 0.029225575892489614
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.25161290322580643,
            "acc_stderr": 0.02468597928623996,
            "acc_norm": 0.25161290322580643,
            "acc_norm_stderr": 0.02468597928623996
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.029343114798094472,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.029343114798094472
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.24528301886792453,
            "acc_stderr": 0.026480357179895702,
            "acc_norm": 0.24528301886792453,
            "acc_norm_stderr": 0.026480357179895702
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.17272727272727273,
            "acc_stderr": 0.03620691833929219,
            "acc_norm": 0.17272727272727273,
            "acc_norm_stderr": 0.03620691833929219
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26296296296296295,
            "acc_stderr": 0.02684205787383371,
            "acc_norm": 0.26296296296296295,
            "acc_norm_stderr": 0.02684205787383371
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.24503311258278146,
            "acc_stderr": 0.03511807571804724,
            "acc_norm": 0.24503311258278146,
            "acc_norm_stderr": 0.03511807571804724
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.23383084577114427,
            "acc_stderr": 0.02992941540834838,
            "acc_norm": 0.23383084577114427,
            "acc_norm_stderr": 0.02992941540834838
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2658959537572254,
            "acc_stderr": 0.03368762932259431,
            "acc_norm": 0.2658959537572254,
            "acc_norm_stderr": 0.03368762932259431
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.02141168439369418,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.02141168439369418
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.03745554791462457,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.03745554791462457
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.2543352601156069,
            "acc_stderr": 0.02344582627654554,
            "acc_norm": 0.2543352601156069,
            "acc_norm_stderr": 0.02344582627654554
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.2331288343558282,
            "acc_stderr": 0.0332201579577674,
            "acc_norm": 0.2331288343558282,
            "acc_norm_stderr": 0.0332201579577674
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2623456790123457,
            "acc_stderr": 0.02447722285613511,
            "acc_norm": 0.2623456790123457,
            "acc_norm_stderr": 0.02447722285613511
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.30569948186528495,
            "acc_stderr": 0.033248379397581594,
            "acc_norm": 0.30569948186528495,
            "acc_norm_stderr": 0.033248379397581594
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.04049339297748141,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.04049339297748141
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.21834862385321102,
            "acc_stderr": 0.017712600528722727,
            "acc_norm": 0.21834862385321102,
            "acc_norm_stderr": 0.017712600528722727
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.23015873015873015,
            "acc_stderr": 0.037649508797906045,
            "acc_norm": 0.23015873015873015,
            "acc_norm_stderr": 0.037649508797906045
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.023929155517351298,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.023929155517351298
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.18,
            "acc_stderr": 0.038612291966536955,
            "acc_norm": 0.18,
            "acc_norm_stderr": 0.038612291966536955
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.256198347107438,
            "acc_stderr": 0.03984979653302871,
            "acc_norm": 0.256198347107438,
            "acc_norm_stderr": 0.03984979653302871
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.25,
            "acc_stderr": 0.03523807393012047,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.03523807393012047
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2630718954248366,
            "acc_stderr": 0.017812676542320657,
            "acc_norm": 0.2630718954248366,
            "acc_norm_stderr": 0.017812676542320657
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2553191489361702,
            "acc_stderr": 0.02601199293090203,
            "acc_norm": 0.2553191489361702,
            "acc_norm_stderr": 0.02601199293090203
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.30357142857142855,
            "acc_stderr": 0.04364226155841044,
            "acc_norm": 0.30357142857142855,
            "acc_norm_stderr": 0.04364226155841044
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.30092592592592593,
            "acc_stderr": 0.031280390843298825,
            "acc_norm": 0.30092592592592593,
            "acc_norm_stderr": 0.031280390843298825
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2536312849162011,
            "acc_stderr": 0.014551553659369916,
            "acc_norm": 0.2536312849162011,
            "acc_norm_stderr": 0.014551553659369916
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768079,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768079
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.19852941176470587,
            "acc_stderr": 0.024231013370541087,
            "acc_norm": 0.19852941176470587,
            "acc_norm_stderr": 0.024231013370541087
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.24897959183673468,
            "acc_stderr": 0.02768297952296023,
            "acc_norm": 0.24897959183673468,
            "acc_norm_stderr": 0.02768297952296023
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2911392405063291,
            "acc_stderr": 0.029571601065753374,
            "acc_norm": 0.2911392405063291,
            "acc_norm_stderr": 0.029571601065753374
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.25358539765319427,
            "acc_stderr": 0.011111715336101138,
            "acc_norm": 0.25358539765319427,
            "acc_norm_stderr": 0.011111715336101138
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.03019028245350195,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.03019028245350195
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2787878787878788,
            "acc_stderr": 0.03501438706296781,
            "acc_norm": 0.2787878787878788,
            "acc_norm_stderr": 0.03501438706296781
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.23623011015911874,
            "mc1_stderr": 0.014869755015871112,
            "mc2": 0.49817574202268433,
            "mc2_stderr": 0.016860322660870557
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.09859154929577464,
            "acc_stderr": 0.010219175985280587,
            "acc_norm": 0.3955399061032864,
            "acc_norm_stderr": 0.016761550511163865
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "yeen214/test_llama2_ko_7b",
        "model_sha": "45901e1d6ccb22f5ed8aec3f9dd366823fdd1c33",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}