{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.35409556313993173,
            "acc_stderr": 0.013975454122756557,
            "acc_norm": 0.4129692832764505,
            "acc_norm_stderr": 0.014388344935398322
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3813981278629755,
            "acc_stderr": 0.004847372670134637,
            "acc_norm": 0.48954391555467036,
            "acc_norm_stderr": 0.00498869022950566
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5263157894736842,
            "acc_stderr": 0.03829509868994727,
            "acc_norm": 0.5263157894736842,
            "acc_norm_stderr": 0.03829509868994727
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6116504854368932,
            "acc_stderr": 0.04825729337356389,
            "acc_norm": 0.6116504854368932,
            "acc_norm_stderr": 0.04825729337356389
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4776500638569604,
            "acc_stderr": 0.017862091778507876,
            "acc_norm": 0.4776500638569604,
            "acc_norm_stderr": 0.017862091778507876
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4148148148148148,
            "acc_stderr": 0.042561937679014075,
            "acc_norm": 0.4148148148148148,
            "acc_norm_stderr": 0.042561937679014075
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.37872340425531914,
            "acc_stderr": 0.03170995606040655,
            "acc_norm": 0.37872340425531914,
            "acc_norm_stderr": 0.03170995606040655
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.45180722891566266,
            "acc_stderr": 0.03874371556587953,
            "acc_norm": 0.45180722891566266,
            "acc_norm_stderr": 0.03874371556587953
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4662379421221865,
            "acc_stderr": 0.028333277109562786,
            "acc_norm": 0.4662379421221865,
            "acc_norm_stderr": 0.028333277109562786
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.45739910313901344,
            "acc_stderr": 0.033435777055830646,
            "acc_norm": 0.45739910313901344,
            "acc_norm_stderr": 0.033435777055830646
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3969465648854962,
            "acc_stderr": 0.04291135671009225,
            "acc_norm": 0.3969465648854962,
            "acc_norm_stderr": 0.04291135671009225
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5252525252525253,
            "acc_stderr": 0.03557806245087314,
            "acc_norm": 0.5252525252525253,
            "acc_norm_stderr": 0.03557806245087314
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4482758620689655,
            "acc_stderr": 0.04144311810878151,
            "acc_norm": 0.4482758620689655,
            "acc_norm_stderr": 0.04144311810878151
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.17647058823529413,
            "acc_stderr": 0.03793281185307809,
            "acc_norm": 0.17647058823529413,
            "acc_norm_stderr": 0.03793281185307809
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4789915966386555,
            "acc_stderr": 0.03244980849990029,
            "acc_norm": 0.4789915966386555,
            "acc_norm_stderr": 0.03244980849990029
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4307692307692308,
            "acc_stderr": 0.02510682066053975,
            "acc_norm": 0.4307692307692308,
            "acc_norm_stderr": 0.02510682066053975
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.6,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5,
            "acc_stderr": 0.04833682445228318,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.04833682445228318
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3645320197044335,
            "acc_stderr": 0.0338640574606209,
            "acc_norm": 0.3645320197044335,
            "acc_norm_stderr": 0.0338640574606209
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5032258064516129,
            "acc_stderr": 0.028443414226438316,
            "acc_norm": 0.5032258064516129,
            "acc_norm_stderr": 0.028443414226438316
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6709401709401709,
            "acc_stderr": 0.03078232157768818,
            "acc_norm": 0.6709401709401709,
            "acc_norm_stderr": 0.03078232157768818
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4490566037735849,
            "acc_stderr": 0.030612730713641095,
            "acc_norm": 0.4490566037735849,
            "acc_norm_stderr": 0.030612730713641095
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4909090909090909,
            "acc_stderr": 0.04788339768702862,
            "acc_norm": 0.4909090909090909,
            "acc_norm_stderr": 0.04788339768702862
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.028742040903948496,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.028742040903948496
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.037101857261199946,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.037101857261199946
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6218905472636815,
            "acc_stderr": 0.03428867848778657,
            "acc_norm": 0.6218905472636815,
            "acc_norm_stderr": 0.03428867848778657
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.36416184971098264,
            "acc_stderr": 0.03669072477416907,
            "acc_norm": 0.36416184971098264,
            "acc_norm_stderr": 0.03669072477416907
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.37566137566137564,
            "acc_stderr": 0.024942368931159795,
            "acc_norm": 0.37566137566137564,
            "acc_norm_stderr": 0.024942368931159795
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2847222222222222,
            "acc_stderr": 0.037738099906869334,
            "acc_norm": 0.2847222222222222,
            "acc_norm_stderr": 0.037738099906869334
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.59,
            "acc_stderr": 0.04943110704237101,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.04943110704237101
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.49421965317919075,
            "acc_stderr": 0.026917296179149123,
            "acc_norm": 0.49421965317919075,
            "acc_norm_stderr": 0.026917296179149123
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4294478527607362,
            "acc_stderr": 0.03889066619112722,
            "acc_norm": 0.4294478527607362,
            "acc_norm_stderr": 0.03889066619112722
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4351851851851852,
            "acc_stderr": 0.027586006221607718,
            "acc_norm": 0.4351851851851852,
            "acc_norm_stderr": 0.027586006221607718
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145632,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145632
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.48704663212435234,
            "acc_stderr": 0.03607228061047749,
            "acc_norm": 0.48704663212435234,
            "acc_norm_stderr": 0.03607228061047749
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.32456140350877194,
            "acc_stderr": 0.044045561573747685,
            "acc_norm": 0.32456140350877194,
            "acc_norm_stderr": 0.044045561573747685
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5009174311926605,
            "acc_stderr": 0.021437287056051215,
            "acc_norm": 0.5009174311926605,
            "acc_norm_stderr": 0.021437287056051215
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.04285714285714281,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.04285714285714281
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4869281045751634,
            "acc_stderr": 0.028620130800700246,
            "acc_norm": 0.4869281045751634,
            "acc_norm_stderr": 0.028620130800700246
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5950413223140496,
            "acc_stderr": 0.04481137755942469,
            "acc_norm": 0.5950413223140496,
            "acc_norm_stderr": 0.04481137755942469
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.34868421052631576,
            "acc_stderr": 0.03878139888797609,
            "acc_norm": 0.34868421052631576,
            "acc_norm_stderr": 0.03878139888797609
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3741830065359477,
            "acc_stderr": 0.01957695312208884,
            "acc_norm": 0.3741830065359477,
            "acc_norm_stderr": 0.01957695312208884
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3617021276595745,
            "acc_stderr": 0.028663820147199492,
            "acc_norm": 0.3617021276595745,
            "acc_norm_stderr": 0.028663820147199492
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.41964285714285715,
            "acc_stderr": 0.046840993210771065,
            "acc_norm": 0.41964285714285715,
            "acc_norm_stderr": 0.046840993210771065
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.37962962962962965,
            "acc_stderr": 0.03309682581119035,
            "acc_norm": 0.37962962962962965,
            "acc_norm_stderr": 0.03309682581119035
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2022346368715084,
            "acc_stderr": 0.01343372948332099,
            "acc_norm": 0.2022346368715084,
            "acc_norm_stderr": 0.01343372948332099
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620333,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620333
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.56,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3492647058823529,
            "acc_stderr": 0.02895975519682487,
            "acc_norm": 0.3492647058823529,
            "acc_norm_stderr": 0.02895975519682487
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.46122448979591835,
            "acc_stderr": 0.03191282052669277,
            "acc_norm": 0.46122448979591835,
            "acc_norm_stderr": 0.03191282052669277
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5611814345991561,
            "acc_stderr": 0.032302649315470375,
            "acc_norm": 0.5611814345991561,
            "acc_norm_stderr": 0.032302649315470375
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.31029986962190353,
            "acc_stderr": 0.011815439293469829,
            "acc_norm": 0.31029986962190353,
            "acc_norm_stderr": 0.011815439293469829
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.49019607843137253,
            "acc_stderr": 0.03508637358630572,
            "acc_norm": 0.49019607843137253,
            "acc_norm_stderr": 0.03508637358630572
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.45454545454545453,
            "acc_stderr": 0.03888176921674099,
            "acc_norm": 0.45454545454545453,
            "acc_norm_stderr": 0.03888176921674099
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3023255813953488,
            "mc1_stderr": 0.016077509266133033,
            "mc2": 0.4750791587895867,
            "mc2_stderr": 0.015736885636484024
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.44037780401416765,
            "acc_stderr": 0.01706769977431298,
            "acc_norm": 0.5041322314049587,
            "acc_norm_stderr": 0.01718976703213082
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover",
        "model_sha": "6a36ede83f774993cca1e5193c0c702e4b998676",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}