{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.27559726962457337,
            "acc_stderr": 0.01305716965576184,
            "acc_norm": 0.3370307167235495,
            "acc_norm_stderr": 0.01381347665290227
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.355008962358096,
            "acc_stderr": 0.004775380866948017,
            "acc_norm": 0.44971121290579563,
            "acc_norm_stderr": 0.004964479324552527
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.3684210526315789,
            "acc_stderr": 0.036996580176568775,
            "acc_norm": 0.3684210526315789,
            "acc_norm_stderr": 0.036996580176568775
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.23300970873786409,
            "acc_stderr": 0.041858325989283136,
            "acc_norm": 0.23300970873786409,
            "acc_norm_stderr": 0.041858325989283136
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.24265644955300128,
            "acc_stderr": 0.015329888940899863,
            "acc_norm": 0.24265644955300128,
            "acc_norm_stderr": 0.015329888940899863
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.22962962962962963,
            "acc_stderr": 0.03633384414073465,
            "acc_norm": 0.22962962962962963,
            "acc_norm_stderr": 0.03633384414073465
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.26382978723404255,
            "acc_stderr": 0.028809989854102987,
            "acc_norm": 0.26382978723404255,
            "acc_norm_stderr": 0.028809989854102987
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3433734939759036,
            "acc_stderr": 0.03696584317010601,
            "acc_norm": 0.3433734939759036,
            "acc_norm_stderr": 0.03696584317010601
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.27009646302250806,
            "acc_stderr": 0.02521804037341062,
            "acc_norm": 0.27009646302250806,
            "acc_norm_stderr": 0.02521804037341062
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.2600896860986547,
            "acc_stderr": 0.029442495585857476,
            "acc_norm": 0.2600896860986547,
            "acc_norm_stderr": 0.029442495585857476
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2748091603053435,
            "acc_stderr": 0.03915345408847835,
            "acc_norm": 0.2748091603053435,
            "acc_norm_stderr": 0.03915345408847835
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720684,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720684
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.18181818181818182,
            "acc_stderr": 0.0274796030105388,
            "acc_norm": 0.18181818181818182,
            "acc_norm_stderr": 0.0274796030105388
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2413793103448276,
            "acc_stderr": 0.03565998174135302,
            "acc_norm": 0.2413793103448276,
            "acc_norm_stderr": 0.03565998174135302
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.042207736591714534,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.042207736591714534
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.02665353159671549,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.02665353159671549
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2205128205128205,
            "acc_stderr": 0.0210206726808279,
            "acc_norm": 0.2205128205128205,
            "acc_norm_stderr": 0.0210206726808279
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.04284467968052191,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.04284467968052191
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.1477832512315271,
            "acc_stderr": 0.02496962133352127,
            "acc_norm": 0.1477832512315271,
            "acc_norm_stderr": 0.02496962133352127
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.1870967741935484,
            "acc_stderr": 0.022185710092252255,
            "acc_norm": 0.1870967741935484,
            "acc_norm_stderr": 0.022185710092252255
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.3076923076923077,
            "acc_stderr": 0.0302363899421731,
            "acc_norm": 0.3076923076923077,
            "acc_norm_stderr": 0.0302363899421731
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.20754716981132076,
            "acc_stderr": 0.024959918028911274,
            "acc_norm": 0.20754716981132076,
            "acc_norm_stderr": 0.024959918028911274
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2818181818181818,
            "acc_stderr": 0.04309118709946459,
            "acc_norm": 0.2818181818181818,
            "acc_norm_stderr": 0.04309118709946459
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2851851851851852,
            "acc_stderr": 0.027528599210340492,
            "acc_norm": 0.2851851851851852,
            "acc_norm_stderr": 0.027528599210340492
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.1986754966887417,
            "acc_stderr": 0.03257847384436775,
            "acc_norm": 0.1986754966887417,
            "acc_norm_stderr": 0.03257847384436775
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.23383084577114427,
            "acc_stderr": 0.02992941540834838,
            "acc_norm": 0.23383084577114427,
            "acc_norm_stderr": 0.02992941540834838
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2254335260115607,
            "acc_stderr": 0.03186209851641143,
            "acc_norm": 0.2254335260115607,
            "acc_norm_stderr": 0.03186209851641143
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.21164021164021163,
            "acc_stderr": 0.02103733150526289,
            "acc_norm": 0.21164021164021163,
            "acc_norm_stderr": 0.02103733150526289
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2638888888888889,
            "acc_stderr": 0.03685651095897532,
            "acc_norm": 0.2638888888888889,
            "acc_norm_stderr": 0.03685651095897532
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.23410404624277456,
            "acc_stderr": 0.022797110278071134,
            "acc_norm": 0.23410404624277456,
            "acc_norm_stderr": 0.022797110278071134
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.2331288343558282,
            "acc_stderr": 0.0332201579577674,
            "acc_norm": 0.2331288343558282,
            "acc_norm_stderr": 0.0332201579577674
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.0246596851859673,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.0246596851859673
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.19689119170984457,
            "acc_stderr": 0.028697873971860677,
            "acc_norm": 0.19689119170984457,
            "acc_norm_stderr": 0.028697873971860677
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03947152782669415,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03947152782669415
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.1926605504587156,
            "acc_stderr": 0.016909276884936097,
            "acc_norm": 0.1926605504587156,
            "acc_norm_stderr": 0.016909276884936097
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.18253968253968253,
            "acc_stderr": 0.03455071019102149,
            "acc_norm": 0.18253968253968253,
            "acc_norm_stderr": 0.03455071019102149
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.2581699346405229,
            "acc_stderr": 0.025058503316958164,
            "acc_norm": 0.2581699346405229,
            "acc_norm_stderr": 0.025058503316958164
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.23140495867768596,
            "acc_stderr": 0.03849856098794087,
            "acc_norm": 0.23140495867768596,
            "acc_norm_stderr": 0.03849856098794087
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.17105263157894737,
            "acc_stderr": 0.030643607071677105,
            "acc_norm": 0.17105263157894737,
            "acc_norm_stderr": 0.030643607071677105
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.017630827375148383,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.017630827375148383
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.26595744680851063,
            "acc_stderr": 0.026358065698880585,
            "acc_norm": 0.26595744680851063,
            "acc_norm_stderr": 0.026358065698880585
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.33035714285714285,
            "acc_stderr": 0.04464285714285714,
            "acc_norm": 0.33035714285714285,
            "acc_norm_stderr": 0.04464285714285714
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.23148148148148148,
            "acc_stderr": 0.02876511171804694,
            "acc_norm": 0.23148148148148148,
            "acc_norm_stderr": 0.02876511171804694
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.28044692737430166,
            "acc_stderr": 0.01502408388332288,
            "acc_norm": 0.28044692737430166,
            "acc_norm_stderr": 0.01502408388332288
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.20220588235294118,
            "acc_stderr": 0.02439819298665492,
            "acc_norm": 0.20220588235294118,
            "acc_norm_stderr": 0.02439819298665492
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.1836734693877551,
            "acc_stderr": 0.024789071332007636,
            "acc_norm": 0.1836734693877551,
            "acc_norm_stderr": 0.024789071332007636
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2869198312236287,
            "acc_stderr": 0.02944377302259469,
            "acc_norm": 0.2869198312236287,
            "acc_norm_stderr": 0.02944377302259469
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2516297262059974,
            "acc_stderr": 0.011083276280441902,
            "acc_norm": 0.2516297262059974,
            "acc_norm_stderr": 0.011083276280441902
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.030190282453501954,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.030190282453501954
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.22424242424242424,
            "acc_stderr": 0.032568666616811015,
            "acc_norm": 0.22424242424242424,
            "acc_norm_stderr": 0.032568666616811015
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2533659730722154,
            "mc1_stderr": 0.01522589934082683,
            "mc2": 0.4063966962881522,
            "mc2_stderr": 0.01492795604718442
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.34946871310507677,
            "acc_stderr": 0.016392797085769843,
            "acc_norm": 0.4592680047225502,
            "acc_norm_stderr": 0.017133218276537673
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "AIFT/AIFT-instruct-SFT-1.3B-v2.1.1",
        "model_sha": "4d434f21f7343f698e1d175cf9e740a0238c9cb9",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}