|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3097269624573379,
            "acc_stderr": 0.013512058415238361,
            "acc_norm": 0.3626279863481229,
            "acc_norm_stderr": 0.01404910656495502
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3571001792471619,
            "acc_stderr": 0.004781654610857131,
            "acc_norm": 0.4563831905994822,
            "acc_norm_stderr": 0.004970759774676884
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4678362573099415,
            "acc_stderr": 0.038268824176603676,
            "acc_norm": 0.4678362573099415,
            "acc_norm_stderr": 0.038268824176603676
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5922330097087378,
            "acc_stderr": 0.048657775704107696,
            "acc_norm": 0.5922330097087378,
            "acc_norm_stderr": 0.048657775704107696
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.45721583652618136,
            "acc_stderr": 0.01781438523853444,
            "acc_norm": 0.45721583652618136,
            "acc_norm_stderr": 0.01781438523853444
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3851851851851852,
            "acc_stderr": 0.042039210401562783,
            "acc_norm": 0.3851851851851852,
            "acc_norm_stderr": 0.042039210401562783
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.33617021276595743,
            "acc_stderr": 0.030881618520676942,
            "acc_norm": 0.33617021276595743,
            "acc_norm_stderr": 0.030881618520676942
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.39759036144578314,
            "acc_stderr": 0.038099730845402184,
            "acc_norm": 0.39759036144578314,
            "acc_norm_stderr": 0.038099730845402184
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4340836012861736,
            "acc_stderr": 0.02815023224453559,
            "acc_norm": 0.4340836012861736,
            "acc_norm_stderr": 0.02815023224453559
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.452914798206278,
            "acc_stderr": 0.033408675019233246,
            "acc_norm": 0.452914798206278,
            "acc_norm_stderr": 0.033408675019233246
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.42748091603053434,
            "acc_stderr": 0.043389203057924,
            "acc_norm": 0.42748091603053434,
            "acc_norm_stderr": 0.043389203057924
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.40404040404040403,
            "acc_stderr": 0.03496130972056127,
            "acc_norm": 0.40404040404040403,
            "acc_norm_stderr": 0.03496130972056127
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4482758620689655,
            "acc_stderr": 0.04144311810878151,
            "acc_norm": 0.4482758620689655,
            "acc_norm_stderr": 0.04144311810878151
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.04023382273617747,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.04023382273617747
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3865546218487395,
            "acc_stderr": 0.0316314580755238,
            "acc_norm": 0.3865546218487395,
            "acc_norm_stderr": 0.0316314580755238
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.382051282051282,
            "acc_stderr": 0.024635549163908223,
            "acc_norm": 0.382051282051282,
            "acc_norm_stderr": 0.024635549163908223
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.59,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4537037037037037,
            "acc_stderr": 0.04812917324536823,
            "acc_norm": 0.4537037037037037,
            "acc_norm_stderr": 0.04812917324536823
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.35960591133004927,
            "acc_stderr": 0.03376458246509568,
            "acc_norm": 0.35960591133004927,
            "acc_norm_stderr": 0.03376458246509568
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4,
            "acc_stderr": 0.027869320571664635,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.027869320571664635
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6623931623931624,
            "acc_stderr": 0.030980296992618554,
            "acc_norm": 0.6623931623931624,
            "acc_norm_stderr": 0.030980296992618554
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4037735849056604,
            "acc_stderr": 0.03019761160019795,
            "acc_norm": 0.4037735849056604,
            "acc_norm_stderr": 0.03019761160019795
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.45454545454545453,
            "acc_stderr": 0.04769300568972742,
            "acc_norm": 0.45454545454545453,
            "acc_norm_stderr": 0.04769300568972742
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3074074074074074,
            "acc_stderr": 0.02813325257881564,
            "acc_norm": 0.3074074074074074,
            "acc_norm_stderr": 0.02813325257881564
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.26490066225165565,
            "acc_stderr": 0.03603038545360384,
            "acc_norm": 0.26490066225165565,
            "acc_norm_stderr": 0.03603038545360384
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5174129353233831,
            "acc_stderr": 0.03533389234739245,
            "acc_norm": 0.5174129353233831,
            "acc_norm_stderr": 0.03533389234739245
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3352601156069364,
            "acc_stderr": 0.03599586301247077,
            "acc_norm": 0.3352601156069364,
            "acc_norm_stderr": 0.03599586301247077
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.37566137566137564,
            "acc_stderr": 0.02494236893115978,
            "acc_norm": 0.37566137566137564,
            "acc_norm_stderr": 0.02494236893115978
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3194444444444444,
            "acc_stderr": 0.038990736873573344,
            "acc_norm": 0.3194444444444444,
            "acc_norm_stderr": 0.038990736873573344
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.56,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.49421965317919075,
            "acc_stderr": 0.02691729617914911,
            "acc_norm": 0.49421965317919075,
            "acc_norm_stderr": 0.02691729617914911
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3987730061349693,
            "acc_stderr": 0.038470214204560246,
            "acc_norm": 0.3987730061349693,
            "acc_norm_stderr": 0.038470214204560246
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4567901234567901,
            "acc_stderr": 0.02771666165019404,
            "acc_norm": 0.4567901234567901,
            "acc_norm_stderr": 0.02771666165019404
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.42487046632124353,
            "acc_stderr": 0.0356747133521254,
            "acc_norm": 0.42487046632124353,
            "acc_norm_stderr": 0.0356747133521254
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.21929824561403508,
            "acc_stderr": 0.03892431106518753,
            "acc_norm": 0.21929824561403508,
            "acc_norm_stderr": 0.03892431106518753
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.4055045871559633,
            "acc_stderr": 0.02105099799189684,
            "acc_norm": 0.4055045871559633,
            "acc_norm_stderr": 0.02105099799189684
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.042163702135578345,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.042163702135578345
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.37254901960784315,
            "acc_stderr": 0.027684181883302884,
            "acc_norm": 0.37254901960784315,
            "acc_norm_stderr": 0.027684181883302884
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.628099173553719,
            "acc_stderr": 0.04412015806624504,
            "acc_norm": 0.628099173553719,
            "acc_norm_stderr": 0.04412015806624504
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3684210526315789,
            "acc_stderr": 0.03925523381052932,
            "acc_norm": 0.3684210526315789,
            "acc_norm_stderr": 0.03925523381052932
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.369281045751634,
            "acc_stderr": 0.01952431674486635,
            "acc_norm": 0.369281045751634,
            "acc_norm_stderr": 0.01952431674486635
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.32269503546099293,
            "acc_stderr": 0.02788913930053478,
            "acc_norm": 0.32269503546099293,
            "acc_norm_stderr": 0.02788913930053478
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4017857142857143,
            "acc_stderr": 0.04653333146973646,
            "acc_norm": 0.4017857142857143,
            "acc_norm_stderr": 0.04653333146973646
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.33796296296296297,
            "acc_stderr": 0.03225941352631295,
            "acc_norm": 0.33796296296296297,
            "acc_norm_stderr": 0.03225941352631295
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23798882681564246,
            "acc_stderr": 0.014242630070574892,
            "acc_norm": 0.23798882681564246,
            "acc_norm_stderr": 0.014242630070574892
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.52,
            "acc_stderr": 0.05021167315686779,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.05021167315686779
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3161764705882353,
            "acc_stderr": 0.02824568739146292,
            "acc_norm": 0.3161764705882353,
            "acc_norm_stderr": 0.02824568739146292
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2979591836734694,
            "acc_stderr": 0.029279567411065677,
            "acc_norm": 0.2979591836734694,
            "acc_norm_stderr": 0.029279567411065677
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5527426160337553,
            "acc_stderr": 0.03236564251614192,
            "acc_norm": 0.5527426160337553,
            "acc_norm_stderr": 0.03236564251614192
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.33833116036505867,
            "acc_stderr": 0.012084265626344202,
            "acc_norm": 0.33833116036505867,
            "acc_norm_stderr": 0.012084265626344202
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.37254901960784315,
            "acc_stderr": 0.03393388584958403,
            "acc_norm": 0.37254901960784315,
            "acc_norm_stderr": 0.03393388584958403
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3151515151515151,
            "acc_stderr": 0.0362773057502241,
            "acc_norm": 0.3151515151515151,
            "acc_norm_stderr": 0.0362773057502241
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3072215422276622,
            "mc1_stderr": 0.016150201321323002,
            "mc2": 0.47435317492542983,
            "mc2_stderr": 0.015496855268461061
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3907910271546635,
            "acc_stderr": 0.01677529846510825,
            "acc_norm": 0.45690672963400236,
            "acc_norm_stderr": 0.017126389093086777
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "LI-ST/Mistral-7B-ko-v0.8",
        "model_sha": "49bb6983b858b53fcd9bcb996bc33feeffc4d8a1",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}