{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.295221843003413,
            "acc_stderr": 0.013329750293382316,
            "acc_norm": 0.3430034129692833,
            "acc_norm_stderr": 0.013872423223718167
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.37054371639115713,
            "acc_stderr": 0.0048196336688325495,
            "acc_norm": 0.46345349531965746,
            "acc_norm_stderr": 0.00497643438746997
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4152046783625731,
            "acc_stderr": 0.037792759455032,
            "acc_norm": 0.4152046783625731,
            "acc_norm_stderr": 0.037792759455032
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4368932038834951,
            "acc_stderr": 0.049111471073657764,
            "acc_norm": 0.4368932038834951,
            "acc_norm_stderr": 0.049111471073657764
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.39719029374201786,
            "acc_stderr": 0.01749790503715938,
            "acc_norm": 0.39719029374201786,
            "acc_norm_stderr": 0.01749790503715938
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.34074074074074073,
            "acc_stderr": 0.040943762699967946,
            "acc_norm": 0.34074074074074073,
            "acc_norm_stderr": 0.040943762699967946
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2765957446808511,
            "acc_stderr": 0.029241883869628813,
            "acc_norm": 0.2765957446808511,
            "acc_norm_stderr": 0.029241883869628813
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3313253012048193,
            "acc_stderr": 0.036643147772880864,
            "acc_norm": 0.3313253012048193,
            "acc_norm_stderr": 0.036643147772880864
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.42443729903536975,
            "acc_stderr": 0.028071928247946205,
            "acc_norm": 0.42443729903536975,
            "acc_norm_stderr": 0.028071928247946205
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.336322869955157,
            "acc_stderr": 0.031708824268455,
            "acc_norm": 0.336322869955157,
            "acc_norm_stderr": 0.031708824268455
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3435114503816794,
            "acc_stderr": 0.041649760719448786,
            "acc_norm": 0.3435114503816794,
            "acc_norm_stderr": 0.041649760719448786
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.47474747474747475,
            "acc_stderr": 0.03557806245087314,
            "acc_norm": 0.47474747474747475,
            "acc_norm_stderr": 0.03557806245087314
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.31724137931034485,
            "acc_stderr": 0.03878352372138623,
            "acc_norm": 0.31724137931034485,
            "acc_norm_stderr": 0.03878352372138623
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171453,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171453
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3907563025210084,
            "acc_stderr": 0.031693802357129965,
            "acc_norm": 0.3907563025210084,
            "acc_norm_stderr": 0.031693802357129965
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.43333333333333335,
            "acc_stderr": 0.02512465352588513,
            "acc_norm": 0.43333333333333335,
            "acc_norm_stderr": 0.02512465352588513
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4074074074074074,
            "acc_stderr": 0.047500773411999854,
            "acc_norm": 0.4074074074074074,
            "acc_norm_stderr": 0.047500773411999854
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3842364532019704,
            "acc_stderr": 0.034223985656575515,
            "acc_norm": 0.3842364532019704,
            "acc_norm_stderr": 0.034223985656575515
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3870967741935484,
            "acc_stderr": 0.027709359675032488,
            "acc_norm": 0.3870967741935484,
            "acc_norm_stderr": 0.027709359675032488
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5299145299145299,
            "acc_stderr": 0.03269741106812443,
            "acc_norm": 0.5299145299145299,
            "acc_norm_stderr": 0.03269741106812443
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.37735849056603776,
            "acc_stderr": 0.029832808114796005,
            "acc_norm": 0.37735849056603776,
            "acc_norm_stderr": 0.029832808114796005
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.39090909090909093,
            "acc_stderr": 0.04673752333670238,
            "acc_norm": 0.39090909090909093,
            "acc_norm_stderr": 0.04673752333670238
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.02671924078371218,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.02671924078371218
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.25165562913907286,
            "acc_stderr": 0.03543304234389985,
            "acc_norm": 0.25165562913907286,
            "acc_norm_stderr": 0.03543304234389985
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.4626865671641791,
            "acc_stderr": 0.035256751674679745,
            "acc_norm": 0.4626865671641791,
            "acc_norm_stderr": 0.035256751674679745
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3699421965317919,
            "acc_stderr": 0.03681229633394319,
            "acc_norm": 0.3699421965317919,
            "acc_norm_stderr": 0.03681229633394319
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.023266512213730578,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.023266512213730578
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3472222222222222,
            "acc_stderr": 0.039812405437178615,
            "acc_norm": 0.3472222222222222,
            "acc_norm_stderr": 0.039812405437178615
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3583815028901734,
            "acc_stderr": 0.025816756791584215,
            "acc_norm": 0.3583815028901734,
            "acc_norm_stderr": 0.025816756791584215
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.36809815950920244,
            "acc_stderr": 0.03789213935838396,
            "acc_norm": 0.36809815950920244,
            "acc_norm_stderr": 0.03789213935838396
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.41358024691358025,
            "acc_stderr": 0.027402042040269952,
            "acc_norm": 0.41358024691358025,
            "acc_norm_stderr": 0.027402042040269952
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.43005181347150256,
            "acc_stderr": 0.03572954333144809,
            "acc_norm": 0.43005181347150256,
            "acc_norm_stderr": 0.03572954333144809
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.04227054451232199,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.04227054451232199
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.41100917431192663,
            "acc_stderr": 0.021095050687277638,
            "acc_norm": 0.41100917431192663,
            "acc_norm_stderr": 0.021095050687277638
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30158730158730157,
            "acc_stderr": 0.04104947269903394,
            "acc_norm": 0.30158730158730157,
            "acc_norm_stderr": 0.04104947269903394
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4084967320261438,
            "acc_stderr": 0.028146405993096358,
            "acc_norm": 0.4084967320261438,
            "acc_norm_stderr": 0.028146405993096358
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145632,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145632
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.4297520661157025,
            "acc_stderr": 0.04519082021319772,
            "acc_norm": 0.4297520661157025,
            "acc_norm_stderr": 0.04519082021319772
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3618421052631579,
            "acc_stderr": 0.03910525752849726,
            "acc_norm": 0.3618421052631579,
            "acc_norm_stderr": 0.03910525752849726
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.29248366013071897,
            "acc_stderr": 0.01840341571010979,
            "acc_norm": 0.29248366013071897,
            "acc_norm_stderr": 0.01840341571010979
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2872340425531915,
            "acc_stderr": 0.026992199173064356,
            "acc_norm": 0.2872340425531915,
            "acc_norm_stderr": 0.026992199173064356
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.038946411200447915,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.038946411200447915
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.033247089118091176,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.033247089118091176
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2905027932960894,
            "acc_stderr": 0.015183844307206157,
            "acc_norm": 0.2905027932960894,
            "acc_norm_stderr": 0.015183844307206157
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.40441176470588236,
            "acc_stderr": 0.02981263070156974,
            "acc_norm": 0.40441176470588236,
            "acc_norm_stderr": 0.02981263070156974
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.37142857142857144,
            "acc_stderr": 0.03093285879278984,
            "acc_norm": 0.37142857142857144,
            "acc_norm_stderr": 0.03093285879278984
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4810126582278481,
            "acc_stderr": 0.03252375148090447,
            "acc_norm": 0.4810126582278481,
            "acc_norm_stderr": 0.03252375148090447
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2966101694915254,
            "acc_stderr": 0.011665946586082844,
            "acc_norm": 0.2966101694915254,
            "acc_norm_stderr": 0.011665946586082844
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.37254901960784315,
            "acc_stderr": 0.03393388584958403,
            "acc_norm": 0.37254901960784315,
            "acc_norm_stderr": 0.03393388584958403
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4,
            "acc_stderr": 0.03825460278380026,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.03825460278380026
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2864137086903305,
            "mc1_stderr": 0.015826142439502346,
            "mc2": 0.449359001521154,
            "mc2_stderr": 0.016084396495163696
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.32585596221959856,
            "acc_stderr": 0.016114023894800336,
            "acc_norm": 0.3565525383707202,
            "acc_norm_stderr": 0.01646770698152745
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "caisarl76/Mistral-7B-orca-platy-2k-ep4",
        "model_sha": "fd2682689d7efd4dd350d71f64a7a8ff09842fd7",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}