{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2773037542662116,
            "acc_stderr": 0.013082095839059374,
            "acc_norm": 0.3267918088737201,
            "acc_norm_stderr": 0.01370666597558734
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.35550687114120694,
            "acc_stderr": 0.004776883632722611,
            "acc_norm": 0.45210117506472813,
            "acc_norm_stderr": 0.00496683255324504
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.2573099415204678,
            "acc_stderr": 0.03352799844161865,
            "acc_norm": 0.2573099415204678,
            "acc_norm_stderr": 0.03352799844161865
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.23300970873786409,
            "acc_stderr": 0.04185832598928315,
            "acc_norm": 0.23300970873786409,
            "acc_norm_stderr": 0.04185832598928315
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.24904214559386972,
            "acc_stderr": 0.015464676163395983,
            "acc_norm": 0.24904214559386972,
            "acc_norm_stderr": 0.015464676163395983
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2074074074074074,
            "acc_stderr": 0.035025531706783165,
            "acc_norm": 0.2074074074074074,
            "acc_norm_stderr": 0.035025531706783165
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.32340425531914896,
            "acc_stderr": 0.030579442773610344,
            "acc_norm": 0.32340425531914896,
            "acc_norm_stderr": 0.030579442773610344
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.30120481927710846,
            "acc_stderr": 0.035716092300534796,
            "acc_norm": 0.30120481927710846,
            "acc_norm_stderr": 0.035716092300534796
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2733118971061093,
            "acc_stderr": 0.02531176597542612,
            "acc_norm": 0.2733118971061093,
            "acc_norm_stderr": 0.02531176597542612
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.36771300448430494,
            "acc_stderr": 0.032361983509282745,
            "acc_norm": 0.36771300448430494,
            "acc_norm_stderr": 0.032361983509282745
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2748091603053435,
            "acc_stderr": 0.03915345408847833,
            "acc_norm": 0.2748091603053435,
            "acc_norm_stderr": 0.03915345408847833
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.20707070707070707,
            "acc_stderr": 0.028869778460267045,
            "acc_norm": 0.20707070707070707,
            "acc_norm_stderr": 0.028869778460267045
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.22758620689655173,
            "acc_stderr": 0.03493950380131184,
            "acc_norm": 0.22758620689655173,
            "acc_norm_stderr": 0.03493950380131184
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.04023382273617747,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.04023382273617747
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.25630252100840334,
            "acc_stderr": 0.028359620870533946,
            "acc_norm": 0.25630252100840334,
            "acc_norm_stderr": 0.028359620870533946
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2282051282051282,
            "acc_stderr": 0.02127839386358628,
            "acc_norm": 0.2282051282051282,
            "acc_norm_stderr": 0.02127839386358628
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.23,
            "acc_stderr": 0.042295258468165065,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.042295258468165065
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.23148148148148148,
            "acc_stderr": 0.04077494709252626,
            "acc_norm": 0.23148148148148148,
            "acc_norm_stderr": 0.04077494709252626
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.24630541871921183,
            "acc_stderr": 0.03031509928561773,
            "acc_norm": 0.24630541871921183,
            "acc_norm_stderr": 0.03031509928561773
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.24838709677419354,
            "acc_stderr": 0.02458002892148101,
            "acc_norm": 0.24838709677419354,
            "acc_norm_stderr": 0.02458002892148101
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.3034188034188034,
            "acc_stderr": 0.030118210106942662,
            "acc_norm": 0.3034188034188034,
            "acc_norm_stderr": 0.030118210106942662
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.24150943396226415,
            "acc_stderr": 0.026341480371118355,
            "acc_norm": 0.24150943396226415,
            "acc_norm_stderr": 0.026341480371118355
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2636363636363636,
            "acc_stderr": 0.04220224692971987,
            "acc_norm": 0.2636363636363636,
            "acc_norm_stderr": 0.04220224692971987
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2814814814814815,
            "acc_stderr": 0.027420019350945277,
            "acc_norm": 0.2814814814814815,
            "acc_norm_stderr": 0.027420019350945277
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3576158940397351,
            "acc_stderr": 0.03913453431177258,
            "acc_norm": 0.3576158940397351,
            "acc_norm_stderr": 0.03913453431177258
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.26865671641791045,
            "acc_stderr": 0.031343283582089536,
            "acc_norm": 0.26865671641791045,
            "acc_norm_stderr": 0.031343283582089536
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2254335260115607,
            "acc_stderr": 0.03186209851641143,
            "acc_norm": 0.2254335260115607,
            "acc_norm_stderr": 0.03186209851641143
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.022644212615525218,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.022644212615525218
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2638888888888889,
            "acc_stderr": 0.03685651095897532,
            "acc_norm": 0.2638888888888889,
            "acc_norm_stderr": 0.03685651095897532
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.26878612716763006,
            "acc_stderr": 0.02386800326250011,
            "acc_norm": 0.26878612716763006,
            "acc_norm_stderr": 0.02386800326250011
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3006134969325153,
            "acc_stderr": 0.03602511318806771,
            "acc_norm": 0.3006134969325153,
            "acc_norm_stderr": 0.03602511318806771
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.29012345679012347,
            "acc_stderr": 0.025251173936495022,
            "acc_norm": 0.29012345679012347,
            "acc_norm_stderr": 0.025251173936495022
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.27979274611398963,
            "acc_stderr": 0.03239637046735703,
            "acc_norm": 0.27979274611398963,
            "acc_norm_stderr": 0.03239637046735703
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03947152782669415,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03947152782669415
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.23853211009174313,
            "acc_stderr": 0.01827257581023186,
            "acc_norm": 0.23853211009174313,
            "acc_norm_stderr": 0.01827257581023186
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.24603174603174602,
            "acc_stderr": 0.038522733649243156,
            "acc_norm": 0.24603174603174602,
            "acc_norm_stderr": 0.038522733649243156
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.27124183006535946,
            "acc_stderr": 0.02545775669666786,
            "acc_norm": 0.27124183006535946,
            "acc_norm_stderr": 0.02545775669666786
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.38016528925619836,
            "acc_stderr": 0.04431324501968432,
            "acc_norm": 0.38016528925619836,
            "acc_norm_stderr": 0.04431324501968432
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.17105263157894737,
            "acc_stderr": 0.030643607071677098,
            "acc_norm": 0.17105263157894737,
            "acc_norm_stderr": 0.030643607071677098
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.26633986928104575,
            "acc_stderr": 0.0178831881346672,
            "acc_norm": 0.26633986928104575,
            "acc_norm_stderr": 0.0178831881346672
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2801418439716312,
            "acc_stderr": 0.026789172351140245,
            "acc_norm": 0.2801418439716312,
            "acc_norm_stderr": 0.026789172351140245
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2767857142857143,
            "acc_stderr": 0.04246624336697622,
            "acc_norm": 0.2767857142857143,
            "acc_norm_stderr": 0.04246624336697622
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.37962962962962965,
            "acc_stderr": 0.03309682581119035,
            "acc_norm": 0.37962962962962965,
            "acc_norm_stderr": 0.03309682581119035
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27262569832402234,
            "acc_stderr": 0.014893391735249608,
            "acc_norm": 0.27262569832402234,
            "acc_norm_stderr": 0.014893391735249608
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816508,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816508
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4227941176470588,
            "acc_stderr": 0.030008562845003476,
            "acc_norm": 0.4227941176470588,
            "acc_norm_stderr": 0.030008562845003476
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.27346938775510204,
            "acc_stderr": 0.028535560337128462,
            "acc_norm": 0.27346938775510204,
            "acc_norm_stderr": 0.028535560337128462
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.24050632911392406,
            "acc_stderr": 0.027820781981149675,
            "acc_norm": 0.24050632911392406,
            "acc_norm_stderr": 0.027820781981149675
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.25749674054758803,
            "acc_stderr": 0.011167706014904154,
            "acc_norm": 0.25749674054758803,
            "acc_norm_stderr": 0.011167706014904154
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.030587591351604246,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.030587591351604246
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2787878787878788,
            "acc_stderr": 0.035014387062967806,
            "acc_norm": 0.2787878787878788,
            "acc_norm_stderr": 0.035014387062967806
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26805385556915545,
            "mc1_stderr": 0.015506204722834559,
            "mc2": 0.4174223008245678,
            "mc2_stderr": 0.014842652351856885
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2585596221959858,
            "acc_stderr": 0.01505335443896399,
            "acc_norm": 0.34710743801652894,
            "acc_norm_stderr": 0.016366945603281276
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "ITT-AF/ITT-42dot_LLM-PLM-1.3B-v2.0",
        "model_sha": "2d8c315f4b602cb8c1f677de62adf07bdc6192f0",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}