{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3046075085324232,
            "acc_stderr": 0.013449522109932492,
            "acc_norm": 0.363481228668942,
            "acc_norm_stderr": 0.014056207319068282
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3981278629755029,
            "acc_stderr": 0.0048851164655502755,
            "acc_norm": 0.5159330810595499,
            "acc_norm_stderr": 0.004987247325495624
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03218093795602357,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03218093795602357
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.24271844660194175,
            "acc_stderr": 0.04245022486384495,
            "acc_norm": 0.24271844660194175,
            "acc_norm_stderr": 0.04245022486384495
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.23754789272030652,
            "acc_stderr": 0.015218733046150193,
            "acc_norm": 0.23754789272030652,
            "acc_norm_stderr": 0.015218733046150193
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.28888888888888886,
            "acc_stderr": 0.039154506304142495,
            "acc_norm": 0.28888888888888886,
            "acc_norm_stderr": 0.039154506304142495
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2297872340425532,
            "acc_stderr": 0.02750175294441242,
            "acc_norm": 0.2297872340425532,
            "acc_norm_stderr": 0.02750175294441242
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.2469879518072289,
            "acc_stderr": 0.03357351982064536,
            "acc_norm": 0.2469879518072289,
            "acc_norm_stderr": 0.03357351982064536
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.33440514469453375,
            "acc_stderr": 0.026795422327893944,
            "acc_norm": 0.33440514469453375,
            "acc_norm_stderr": 0.026795422327893944
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.1031390134529148,
            "acc_stderr": 0.020412564289839272,
            "acc_norm": 0.1031390134529148,
            "acc_norm_stderr": 0.020412564289839272
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2748091603053435,
            "acc_stderr": 0.039153454088478354,
            "acc_norm": 0.2748091603053435,
            "acc_norm_stderr": 0.039153454088478354
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.40404040404040403,
            "acc_stderr": 0.03496130972056128,
            "acc_norm": 0.40404040404040403,
            "acc_norm_stderr": 0.03496130972056128
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3310344827586207,
            "acc_stderr": 0.03921545312467122,
            "acc_norm": 0.3310344827586207,
            "acc_norm_stderr": 0.03921545312467122
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.04389869956808777,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.04389869956808777
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.18907563025210083,
            "acc_stderr": 0.02543511943810536,
            "acc_norm": 0.18907563025210083,
            "acc_norm_stderr": 0.02543511943810536
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.21794871794871795,
            "acc_stderr": 0.020932445774463175,
            "acc_norm": 0.21794871794871795,
            "acc_norm_stderr": 0.020932445774463175
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.18,
            "acc_stderr": 0.03861229196653694,
            "acc_norm": 0.18,
            "acc_norm_stderr": 0.03861229196653694
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.16,
            "acc_stderr": 0.03684529491774708,
            "acc_norm": 0.16,
            "acc_norm_stderr": 0.03684529491774708
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.04284467968052191,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.04284467968052191
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.1921182266009852,
            "acc_stderr": 0.027719315709614778,
            "acc_norm": 0.1921182266009852,
            "acc_norm_stderr": 0.027719315709614778
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.25483870967741934,
            "acc_stderr": 0.024790118459332204,
            "acc_norm": 0.25483870967741934,
            "acc_norm_stderr": 0.024790118459332204
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2905982905982906,
            "acc_stderr": 0.029745048572674057,
            "acc_norm": 0.2905982905982906,
            "acc_norm_stderr": 0.029745048572674057
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.29056603773584905,
            "acc_stderr": 0.027943219989337156,
            "acc_norm": 0.29056603773584905,
            "acc_norm_stderr": 0.027943219989337156
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2636363636363636,
            "acc_stderr": 0.04220224692971987,
            "acc_norm": 0.2636363636363636,
            "acc_norm_stderr": 0.04220224692971987
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.026719240783712166,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.026719240783712166
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.03710185726119995,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.03710185726119995
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.19402985074626866,
            "acc_stderr": 0.027962677604768914,
            "acc_norm": 0.19402985074626866,
            "acc_norm_stderr": 0.027962677604768914
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2254335260115607,
            "acc_stderr": 0.03186209851641143,
            "acc_norm": 0.2254335260115607,
            "acc_norm_stderr": 0.03186209851641143
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.24867724867724866,
            "acc_stderr": 0.02226181769240018,
            "acc_norm": 0.24867724867724866,
            "acc_norm_stderr": 0.02226181769240018
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3125,
            "acc_stderr": 0.038760854559127644,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.038760854559127644
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.21,
            "acc_stderr": 0.04093601807403326,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.04093601807403326
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.2774566473988439,
            "acc_stderr": 0.024105712607754307,
            "acc_norm": 0.2774566473988439,
            "acc_norm_stderr": 0.024105712607754307
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3067484662576687,
            "acc_stderr": 0.036230899157241474,
            "acc_norm": 0.3067484662576687,
            "acc_norm_stderr": 0.036230899157241474
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.24074074074074073,
            "acc_stderr": 0.023788583551658537,
            "acc_norm": 0.24074074074074073,
            "acc_norm_stderr": 0.023788583551658537
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768077,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768077
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.22797927461139897,
            "acc_stderr": 0.030276909945178253,
            "acc_norm": 0.22797927461139897,
            "acc_norm_stderr": 0.030276909945178253
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.30701754385964913,
            "acc_stderr": 0.043391383225798594,
            "acc_norm": 0.30701754385964913,
            "acc_norm_stderr": 0.043391383225798594
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3376146788990826,
            "acc_stderr": 0.02027526598663891,
            "acc_norm": 0.3376146788990826,
            "acc_norm_stderr": 0.02027526598663891
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.23809523809523808,
            "acc_stderr": 0.038095238095238126,
            "acc_norm": 0.23809523809523808,
            "acc_norm_stderr": 0.038095238095238126
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.27450980392156865,
            "acc_stderr": 0.025553169991826524,
            "acc_norm": 0.27450980392156865,
            "acc_norm_stderr": 0.025553169991826524
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.24793388429752067,
            "acc_stderr": 0.03941897526516302,
            "acc_norm": 0.24793388429752067,
            "acc_norm_stderr": 0.03941897526516302
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03459777606810537,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03459777606810537
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.25980392156862747,
            "acc_stderr": 0.01774089950917779,
            "acc_norm": 0.25980392156862747,
            "acc_norm_stderr": 0.01774089950917779
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.22340425531914893,
            "acc_stderr": 0.024847921358063962,
            "acc_norm": 0.22340425531914893,
            "acc_norm_stderr": 0.024847921358063962
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25892857142857145,
            "acc_stderr": 0.04157751539865629,
            "acc_norm": 0.25892857142857145,
            "acc_norm_stderr": 0.04157751539865629
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.24537037037037038,
            "acc_stderr": 0.029346665094372937,
            "acc_norm": 0.24537037037037038,
            "acc_norm_stderr": 0.029346665094372937
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27262569832402234,
            "acc_stderr": 0.014893391735249608,
            "acc_norm": 0.27262569832402234,
            "acc_norm_stderr": 0.014893391735249608
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3786764705882353,
            "acc_stderr": 0.02946513363977613,
            "acc_norm": 0.3786764705882353,
            "acc_norm_stderr": 0.02946513363977613
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.35918367346938773,
            "acc_stderr": 0.03071356045510849,
            "acc_norm": 0.35918367346938773,
            "acc_norm_stderr": 0.03071356045510849
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.24472573839662448,
            "acc_stderr": 0.027985699387036416,
            "acc_norm": 0.24472573839662448,
            "acc_norm_stderr": 0.027985699387036416
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2588005215123859,
            "acc_stderr": 0.011186109046564608,
            "acc_norm": 0.2588005215123859,
            "acc_norm_stderr": 0.011186109046564608
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.28431372549019607,
            "acc_stderr": 0.031660096793998116,
            "acc_norm": 0.28431372549019607,
            "acc_norm_stderr": 0.031660096793998116
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2,
            "acc_stderr": 0.03123475237772118,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.03123475237772118
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.27906976744186046,
            "mc1_stderr": 0.015702107090627887,
            "mc2": 0.4515720476496737,
            "mc2_stderr": 0.015493161984611252
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2680047225501771,
            "acc_stderr": 0.015227905796335147,
            "acc_norm": 0.3707201889020071,
            "acc_norm_stderr": 0.016605801289212598
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/polyglot-ko-12.8b-instruct",
        "model_sha": "a8354bcedc167e8e1f7dac8a347bf4b61d9c9bf0",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}