{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.20136518771331058,
            "acc_stderr": 0.011718927477444269,
            "acc_norm": 0.25170648464163825,
            "acc_norm_stderr": 0.012682496334042961
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.2892850029874527,
            "acc_stderr": 0.004525037849178839,
            "acc_norm": 0.32822146982672773,
            "acc_norm_stderr": 0.00468606242115814
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.21052631578947367,
            "acc_stderr": 0.0312678171466318,
            "acc_norm": 0.21052631578947367,
            "acc_norm_stderr": 0.0312678171466318
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.17475728155339806,
            "acc_stderr": 0.037601780060266196,
            "acc_norm": 0.17475728155339806,
            "acc_norm_stderr": 0.037601780060266196
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2707535121328225,
            "acc_stderr": 0.01588988836256049,
            "acc_norm": 0.2707535121328225,
            "acc_norm_stderr": 0.01588988836256049
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04072314811876837,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04072314811876837
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.18723404255319148,
            "acc_stderr": 0.025501588341883614,
            "acc_norm": 0.18723404255319148,
            "acc_norm_stderr": 0.025501588341883614
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.28313253012048195,
            "acc_stderr": 0.03507295431370518,
            "acc_norm": 0.28313253012048195,
            "acc_norm_stderr": 0.03507295431370518
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2990353697749196,
            "acc_stderr": 0.02600330111788514,
            "acc_norm": 0.2990353697749196,
            "acc_norm_stderr": 0.02600330111788514
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.336322869955157,
            "acc_stderr": 0.031708824268455,
            "acc_norm": 0.336322869955157,
            "acc_norm_stderr": 0.031708824268455
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.25190839694656486,
            "acc_stderr": 0.03807387116306085,
            "acc_norm": 0.25190839694656486,
            "acc_norm_stderr": 0.03807387116306085
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.029620227874790458,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.029620227874790458
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.22758620689655173,
            "acc_stderr": 0.03493950380131184,
            "acc_norm": 0.22758620689655173,
            "acc_norm_stderr": 0.03493950380131184
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.04023382273617747,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.04023382273617747
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.23109243697478993,
            "acc_stderr": 0.02738140692786896,
            "acc_norm": 0.23109243697478993,
            "acc_norm_stderr": 0.02738140692786896
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2692307692307692,
            "acc_stderr": 0.022489389793654835,
            "acc_norm": 0.2692307692307692,
            "acc_norm_stderr": 0.022489389793654835
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909283,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909283
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.23148148148148148,
            "acc_stderr": 0.04077494709252628,
            "acc_norm": 0.23148148148148148,
            "acc_norm_stderr": 0.04077494709252628
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3103448275862069,
            "acc_stderr": 0.03255086769970103,
            "acc_norm": 0.3103448275862069,
            "acc_norm_stderr": 0.03255086769970103
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.2870967741935484,
            "acc_stderr": 0.025736542745594525,
            "acc_norm": 0.2870967741935484,
            "acc_norm_stderr": 0.025736542745594525
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.19230769230769232,
            "acc_stderr": 0.025819233256483727,
            "acc_norm": 0.19230769230769232,
            "acc_norm_stderr": 0.025819233256483727
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.27169811320754716,
            "acc_stderr": 0.027377706624670713,
            "acc_norm": 0.27169811320754716,
            "acc_norm_stderr": 0.027377706624670713
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.20909090909090908,
            "acc_stderr": 0.03895091015724135,
            "acc_norm": 0.20909090909090908,
            "acc_norm_stderr": 0.03895091015724135
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.027309140588230196,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.027309140588230196
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389023,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389023
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.24378109452736318,
            "acc_stderr": 0.03036049015401464,
            "acc_norm": 0.24378109452736318,
            "acc_norm_stderr": 0.03036049015401464
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.20809248554913296,
            "acc_stderr": 0.030952890217749884,
            "acc_norm": 0.20809248554913296,
            "acc_norm_stderr": 0.030952890217749884
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2566137566137566,
            "acc_stderr": 0.022494510767503154,
            "acc_norm": 0.2566137566137566,
            "acc_norm_stderr": 0.022494510767503154
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.03476590104304134,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.03476590104304134
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.24277456647398843,
            "acc_stderr": 0.0230836585869842,
            "acc_norm": 0.24277456647398843,
            "acc_norm_stderr": 0.0230836585869842
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3067484662576687,
            "acc_stderr": 0.03623089915724148,
            "acc_norm": 0.3067484662576687,
            "acc_norm_stderr": 0.03623089915724148
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.24691358024691357,
            "acc_stderr": 0.023993501709042096,
            "acc_norm": 0.24691358024691357,
            "acc_norm_stderr": 0.023993501709042096
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421296,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421296
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.23834196891191708,
            "acc_stderr": 0.030748905363909902,
            "acc_norm": 0.23834196891191708,
            "acc_norm_stderr": 0.030748905363909902
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.04049339297748141,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.04049339297748141
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.22752293577981653,
            "acc_stderr": 0.0179744635787765,
            "acc_norm": 0.22752293577981653,
            "acc_norm_stderr": 0.0179744635787765
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.14285714285714285,
            "acc_stderr": 0.031298431857438094,
            "acc_norm": 0.14285714285714285,
            "acc_norm_stderr": 0.031298431857438094
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.26143790849673204,
            "acc_stderr": 0.025160998214292456,
            "acc_norm": 0.26143790849673204,
            "acc_norm_stderr": 0.025160998214292456
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.24793388429752067,
            "acc_stderr": 0.039418975265163025,
            "acc_norm": 0.24793388429752067,
            "acc_norm_stderr": 0.039418975265163025
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.19078947368421054,
            "acc_stderr": 0.03197565821032501,
            "acc_norm": 0.19078947368421054,
            "acc_norm_stderr": 0.03197565821032501
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2173202614379085,
            "acc_stderr": 0.016684820929148594,
            "acc_norm": 0.2173202614379085,
            "acc_norm_stderr": 0.016684820929148594
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2624113475177305,
            "acc_stderr": 0.026244920349843007,
            "acc_norm": 0.2624113475177305,
            "acc_norm_stderr": 0.026244920349843007
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.22321428571428573,
            "acc_stderr": 0.03952301967702511,
            "acc_norm": 0.22321428571428573,
            "acc_norm_stderr": 0.03952301967702511
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.0340470532865388,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.0340470532865388
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24022346368715083,
            "acc_stderr": 0.014288343803925328,
            "acc_norm": 0.24022346368715083,
            "acc_norm_stderr": 0.014288343803925328
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252604,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252604
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.40808823529411764,
            "acc_stderr": 0.029855261393483924,
            "acc_norm": 0.40808823529411764,
            "acc_norm_stderr": 0.029855261393483924
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.16326530612244897,
            "acc_stderr": 0.023661699177098598,
            "acc_norm": 0.16326530612244897,
            "acc_norm_stderr": 0.023661699177098598
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2616033755274262,
            "acc_stderr": 0.028609516716994934,
            "acc_norm": 0.2616033755274262,
            "acc_norm_stderr": 0.028609516716994934
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2522816166883963,
            "acc_stderr": 0.011092789056875229,
            "acc_norm": 0.2522816166883963,
            "acc_norm_stderr": 0.011092789056875229
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.030587591351604243,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.030587591351604243
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.22424242424242424,
            "acc_stderr": 0.032568666616811015,
            "acc_norm": 0.22424242424242424,
            "acc_norm_stderr": 0.032568666616811015
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.24724602203182375,
            "mc1_stderr": 0.015102404797359649,
            "mc2": 0.44204190262154125,
            "mc2_stderr": 0.015345648446767756
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3364817001180638,
            "acc_stderr": 0.016245085294386556,
            "acc_norm": 0.4427390791027155,
            "acc_norm_stderr": 0.017077254131556217
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "blueapple8259/ANHSY_test",
        "model_sha": "eb2f1cb1cc7a4dfab1e641fb65c64293ed14006c",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}