{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.697098976109215,
            "acc_stderr": 0.013428241573185349,
            "acc_norm": 0.7482935153583617,
            "acc_norm_stderr": 0.012682496334042963
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.5962955586536547,
            "acc_stderr": 0.004896368185765242,
            "acc_norm": 0.7506472814180443,
            "acc_norm_stderr": 0.004317541575275725
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.7251461988304093,
            "acc_stderr": 0.034240429246915824,
            "acc_norm": 0.7251461988304093,
            "acc_norm_stderr": 0.034240429246915824
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.7766990291262136,
            "acc_stderr": 0.04123553189891431,
            "acc_norm": 0.7766990291262136,
            "acc_norm_stderr": 0.04123553189891431
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.698595146871009,
            "acc_stderr": 0.016409091097268794,
            "acc_norm": 0.698595146871009,
            "acc_norm_stderr": 0.016409091097268794
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.48148148148148145,
            "acc_stderr": 0.043163785995113245,
            "acc_norm": 0.48148148148148145,
            "acc_norm_stderr": 0.043163785995113245
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.5063829787234042,
            "acc_stderr": 0.032683358999363345,
            "acc_norm": 0.5063829787234042,
            "acc_norm_stderr": 0.032683358999363345
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4939759036144578,
            "acc_stderr": 0.03892212195333047,
            "acc_norm": 0.4939759036144578,
            "acc_norm_stderr": 0.03892212195333047
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.6334405144694534,
            "acc_stderr": 0.02736807824397164,
            "acc_norm": 0.6334405144694534,
            "acc_norm_stderr": 0.02736807824397164
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.6457399103139013,
            "acc_stderr": 0.032100621541349864,
            "acc_norm": 0.6457399103139013,
            "acc_norm_stderr": 0.032100621541349864
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5801526717557252,
            "acc_stderr": 0.04328577215262972,
            "acc_norm": 0.5801526717557252,
            "acc_norm_stderr": 0.04328577215262972
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956911,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956911
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.7474747474747475,
            "acc_stderr": 0.030954055470365907,
            "acc_norm": 0.7474747474747475,
            "acc_norm_stderr": 0.030954055470365907
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5448275862068965,
            "acc_stderr": 0.04149886942192118,
            "acc_norm": 0.5448275862068965,
            "acc_norm_stderr": 0.04149886942192118
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3235294117647059,
            "acc_stderr": 0.046550104113196177,
            "acc_norm": 0.3235294117647059,
            "acc_norm_stderr": 0.046550104113196177
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.6134453781512605,
            "acc_stderr": 0.03163145807552378,
            "acc_norm": 0.6134453781512605,
            "acc_norm_stderr": 0.03163145807552378
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.6384615384615384,
            "acc_stderr": 0.024359581465397007,
            "acc_norm": 0.6384615384615384,
            "acc_norm_stderr": 0.024359581465397007
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.7,
            "acc_stderr": 0.04605661864718381,
            "acc_norm": 0.7,
            "acc_norm_stderr": 0.04605661864718381
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6851851851851852,
            "acc_stderr": 0.04489931073591312,
            "acc_norm": 0.6851851851851852,
            "acc_norm_stderr": 0.04489931073591312
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4433497536945813,
            "acc_stderr": 0.03495334582162934,
            "acc_norm": 0.4433497536945813,
            "acc_norm_stderr": 0.03495334582162934
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.603225806451613,
            "acc_stderr": 0.027831231605767948,
            "acc_norm": 0.603225806451613,
            "acc_norm_stderr": 0.027831231605767948
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.8376068376068376,
            "acc_stderr": 0.024161618127987745,
            "acc_norm": 0.8376068376068376,
            "acc_norm_stderr": 0.024161618127987745
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5924528301886792,
            "acc_stderr": 0.030242233800854494,
            "acc_norm": 0.5924528301886792,
            "acc_norm_stderr": 0.030242233800854494
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.6272727272727273,
            "acc_stderr": 0.04631381319425465,
            "acc_norm": 0.6272727272727273,
            "acc_norm_stderr": 0.04631381319425465
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.029723278961476668,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.029723278961476668
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3443708609271523,
            "acc_stderr": 0.038796870240733264,
            "acc_norm": 0.3443708609271523,
            "acc_norm_stderr": 0.038796870240733264
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.7263681592039801,
            "acc_stderr": 0.03152439186555402,
            "acc_norm": 0.7263681592039801,
            "acc_norm_stderr": 0.03152439186555402
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.5375722543352601,
            "acc_stderr": 0.03801685104524458,
            "acc_norm": 0.5375722543352601,
            "acc_norm_stderr": 0.03801685104524458
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.4523809523809524,
            "acc_stderr": 0.025634258115554965,
            "acc_norm": 0.4523809523809524,
            "acc_norm_stderr": 0.025634258115554965
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.6111111111111112,
            "acc_stderr": 0.04076663253918567,
            "acc_norm": 0.6111111111111112,
            "acc_norm_stderr": 0.04076663253918567
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.74,
            "acc_stderr": 0.04408440022768078,
            "acc_norm": 0.74,
            "acc_norm_stderr": 0.04408440022768078
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.6011560693641619,
            "acc_stderr": 0.02636243757454654,
            "acc_norm": 0.6011560693641619,
            "acc_norm_stderr": 0.02636243757454654
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5705521472392638,
            "acc_stderr": 0.03889066619112722,
            "acc_norm": 0.5705521472392638,
            "acc_norm_stderr": 0.03889066619112722
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.6697530864197531,
            "acc_stderr": 0.026168298456732846,
            "acc_norm": 0.6697530864197531,
            "acc_norm_stderr": 0.026168298456732846
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.7461139896373057,
            "acc_stderr": 0.03141024780565318,
            "acc_norm": 0.7461139896373057,
            "acc_norm_stderr": 0.03141024780565318
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.43859649122807015,
            "acc_stderr": 0.04668000738510455,
            "acc_norm": 0.43859649122807015,
            "acc_norm_stderr": 0.04668000738510455
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.7321100917431193,
            "acc_stderr": 0.018987462257978652,
            "acc_norm": 0.7321100917431193,
            "acc_norm_stderr": 0.018987462257978652
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.4126984126984127,
            "acc_stderr": 0.04403438954768177,
            "acc_norm": 0.4126984126984127,
            "acc_norm_stderr": 0.04403438954768177
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.6241830065359477,
            "acc_stderr": 0.027732834353363947,
            "acc_norm": 0.6241830065359477,
            "acc_norm_stderr": 0.027732834353363947
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.7,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.7,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7355371900826446,
            "acc_stderr": 0.04026187527591207,
            "acc_norm": 0.7355371900826446,
            "acc_norm_stderr": 0.04026187527591207
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.6513157894736842,
            "acc_stderr": 0.03878139888797611,
            "acc_norm": 0.6513157894736842,
            "acc_norm_stderr": 0.03878139888797611
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.5849673202614379,
            "acc_stderr": 0.01993362777685742,
            "acc_norm": 0.5849673202614379,
            "acc_norm_stderr": 0.01993362777685742
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.4397163120567376,
            "acc_stderr": 0.029609912075594113,
            "acc_norm": 0.4397163120567376,
            "acc_norm_stderr": 0.029609912075594113
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.04697113923010213,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.04697113923010213
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.48148148148148145,
            "acc_stderr": 0.0340763209385405,
            "acc_norm": 0.48148148148148145,
            "acc_norm_stderr": 0.0340763209385405
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.4033519553072626,
            "acc_stderr": 0.016407123032195246,
            "acc_norm": 0.4033519553072626,
            "acc_norm_stderr": 0.016407123032195246
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.68,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.68,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5257352941176471,
            "acc_stderr": 0.030332578094555026,
            "acc_norm": 0.5257352941176471,
            "acc_norm_stderr": 0.030332578094555026
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.689795918367347,
            "acc_stderr": 0.02961345987248438,
            "acc_norm": 0.689795918367347,
            "acc_norm_stderr": 0.02961345987248438
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.729957805907173,
            "acc_stderr": 0.028900721906293426,
            "acc_norm": 0.729957805907173,
            "acc_norm_stderr": 0.028900721906293426
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.4517601043024772,
            "acc_stderr": 0.012710662233660247,
            "acc_norm": 0.4517601043024772,
            "acc_norm_stderr": 0.012710662233660247
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6715686274509803,
            "acc_stderr": 0.032962451101722294,
            "acc_norm": 0.6715686274509803,
            "acc_norm_stderr": 0.032962451101722294
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6121212121212121,
            "acc_stderr": 0.0380491365397101,
            "acc_norm": 0.6121212121212121,
            "acc_norm_stderr": 0.0380491365397101
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.7221542227662179,
            "mc1_stderr": 0.015680929364024664,
            "mc2": 0.8117453553489173,
            "mc2_stderr": 0.01295465373920051
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.564344746162928,
            "acc_stderr": 0.017047415229476327,
            "acc_norm": 0.5820543093270366,
            "acc_norm_stderr": 0.01695729200527971
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "JY623/KoSOLAR-10.7B-merge-v2.0",
        "model_sha": "2b54466381de31e8945204dfe3bd6c0642cf9ce5",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}