{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.47013651877133106,
            "acc_stderr": 0.014585305840007102,
            "acc_norm": 0.5392491467576792,
            "acc_norm_stderr": 0.014566303676636586
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.45439155546703847,
            "acc_stderr": 0.004968979259738328,
            "acc_norm": 0.6374228241386178,
            "acc_norm_stderr": 0.004797616754372309
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6783625730994152,
            "acc_stderr": 0.03582529442573122,
            "acc_norm": 0.6783625730994152,
            "acc_norm_stderr": 0.03582529442573122
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6893203883495146,
            "acc_stderr": 0.04582124160161551,
            "acc_norm": 0.6893203883495146,
            "acc_norm_stderr": 0.04582124160161551
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.7139208173690932,
            "acc_stderr": 0.016160871405127515,
            "acc_norm": 0.7139208173690932,
            "acc_norm_stderr": 0.016160871405127515
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.5111111111111111,
            "acc_stderr": 0.04318275491977976,
            "acc_norm": 0.5111111111111111,
            "acc_norm_stderr": 0.04318275491977976
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768076,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768076
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.574468085106383,
            "acc_stderr": 0.0323214691622447,
            "acc_norm": 0.574468085106383,
            "acc_norm_stderr": 0.0323214691622447
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.5301204819277109,
            "acc_stderr": 0.03885425420866767,
            "acc_norm": 0.5301204819277109,
            "acc_norm_stderr": 0.03885425420866767
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.6366559485530546,
            "acc_stderr": 0.027316847674192714,
            "acc_norm": 0.6366559485530546,
            "acc_norm_stderr": 0.027316847674192714
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.6278026905829597,
            "acc_stderr": 0.03244305283008731,
            "acc_norm": 0.6278026905829597,
            "acc_norm_stderr": 0.03244305283008731
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.6335877862595419,
            "acc_stderr": 0.04225875451969639,
            "acc_norm": 0.6335877862595419,
            "acc_norm_stderr": 0.04225875451969639
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956913,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956913
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.797979797979798,
            "acc_stderr": 0.028606204289229872,
            "acc_norm": 0.797979797979798,
            "acc_norm_stderr": 0.028606204289229872
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.496551724137931,
            "acc_stderr": 0.041665675771015785,
            "acc_norm": 0.496551724137931,
            "acc_norm_stderr": 0.041665675771015785
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3431372549019608,
            "acc_stderr": 0.04724007352383888,
            "acc_norm": 0.3431372549019608,
            "acc_norm_stderr": 0.04724007352383888
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.6470588235294118,
            "acc_stderr": 0.031041941304059288,
            "acc_norm": 0.6470588235294118,
            "acc_norm_stderr": 0.031041941304059288
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5666666666666667,
            "acc_stderr": 0.0251246535258851,
            "acc_norm": 0.5666666666666667,
            "acc_norm_stderr": 0.0251246535258851
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.6,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6759259259259259,
            "acc_stderr": 0.04524596007030048,
            "acc_norm": 0.6759259259259259,
            "acc_norm_stderr": 0.04524596007030048
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.43842364532019706,
            "acc_stderr": 0.03491207857486518,
            "acc_norm": 0.43842364532019706,
            "acc_norm_stderr": 0.03491207857486518
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.6580645161290323,
            "acc_stderr": 0.02698528957655273,
            "acc_norm": 0.6580645161290323,
            "acc_norm_stderr": 0.02698528957655273
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.811965811965812,
            "acc_stderr": 0.025598193686652258,
            "acc_norm": 0.811965811965812,
            "acc_norm_stderr": 0.025598193686652258
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5849056603773585,
            "acc_stderr": 0.030325945789286105,
            "acc_norm": 0.5849056603773585,
            "acc_norm_stderr": 0.030325945789286105
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.6454545454545455,
            "acc_stderr": 0.045820048415054174,
            "acc_norm": 0.6454545454545455,
            "acc_norm_stderr": 0.045820048415054174
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.31851851851851853,
            "acc_stderr": 0.02840653309060846,
            "acc_norm": 0.31851851851851853,
            "acc_norm_stderr": 0.02840653309060846
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.32450331125827814,
            "acc_stderr": 0.038227469376587525,
            "acc_norm": 0.32450331125827814,
            "acc_norm_stderr": 0.038227469376587525
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.736318407960199,
            "acc_stderr": 0.03115715086935557,
            "acc_norm": 0.736318407960199,
            "acc_norm_stderr": 0.03115715086935557
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.5433526011560693,
            "acc_stderr": 0.03798106566014498,
            "acc_norm": 0.5433526011560693,
            "acc_norm_stderr": 0.03798106566014498
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.025487187147859372,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.025487187147859372
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5833333333333334,
            "acc_stderr": 0.04122728707651282,
            "acc_norm": 0.5833333333333334,
            "acc_norm_stderr": 0.04122728707651282
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.75,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.75,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.6242774566473989,
            "acc_stderr": 0.02607431485165708,
            "acc_norm": 0.6242774566473989,
            "acc_norm_stderr": 0.02607431485165708
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5705521472392638,
            "acc_stderr": 0.038890666191127236,
            "acc_norm": 0.5705521472392638,
            "acc_norm_stderr": 0.038890666191127236
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.6604938271604939,
            "acc_stderr": 0.026348564412011628,
            "acc_norm": 0.6604938271604939,
            "acc_norm_stderr": 0.026348564412011628
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.7668393782383419,
            "acc_stderr": 0.03051611137147601,
            "acc_norm": 0.7668393782383419,
            "acc_norm_stderr": 0.03051611137147601
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.49122807017543857,
            "acc_stderr": 0.047028804320496165,
            "acc_norm": 0.49122807017543857,
            "acc_norm_stderr": 0.047028804320496165
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.726605504587156,
            "acc_stderr": 0.01910929984609828,
            "acc_norm": 0.726605504587156,
            "acc_norm_stderr": 0.01910929984609828
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.044444444444444495,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.044444444444444495
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.6405228758169934,
            "acc_stderr": 0.027475969910660952,
            "acc_norm": 0.6405228758169934,
            "acc_norm_stderr": 0.027475969910660952
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.63,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7107438016528925,
            "acc_stderr": 0.041391127276354626,
            "acc_norm": 0.7107438016528925,
            "acc_norm_stderr": 0.041391127276354626
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.625,
            "acc_stderr": 0.039397364351956274,
            "acc_norm": 0.625,
            "acc_norm_stderr": 0.039397364351956274
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.545751633986928,
            "acc_stderr": 0.020142974553795205,
            "acc_norm": 0.545751633986928,
            "acc_norm_stderr": 0.020142974553795205
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.4219858156028369,
            "acc_stderr": 0.029462189233370576,
            "acc_norm": 0.4219858156028369,
            "acc_norm_stderr": 0.029462189233370576
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.41964285714285715,
            "acc_stderr": 0.04684099321077106,
            "acc_norm": 0.41964285714285715,
            "acc_norm_stderr": 0.04684099321077106
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.5092592592592593,
            "acc_stderr": 0.034093869469927006,
            "acc_norm": 0.5092592592592593,
            "acc_norm_stderr": 0.034093869469927006
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24692737430167597,
            "acc_stderr": 0.01442229220480885,
            "acc_norm": 0.24692737430167597,
            "acc_norm_stderr": 0.01442229220480885
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.76,
            "acc_stderr": 0.04292346959909282,
            "acc_norm": 0.76,
            "acc_norm_stderr": 0.04292346959909282
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5404411764705882,
            "acc_stderr": 0.030273325077345755,
            "acc_norm": 0.5404411764705882,
            "acc_norm_stderr": 0.030273325077345755
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.7551020408163265,
            "acc_stderr": 0.02752963744017492,
            "acc_norm": 0.7551020408163265,
            "acc_norm_stderr": 0.02752963744017492
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.7932489451476793,
            "acc_stderr": 0.026361651668389094,
            "acc_norm": 0.7932489451476793,
            "acc_norm_stderr": 0.026361651668389094
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.4302477183833116,
            "acc_stderr": 0.012645361435115226,
            "acc_norm": 0.4302477183833116,
            "acc_norm_stderr": 0.012645361435115226
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.7696078431372549,
            "acc_stderr": 0.02955429260569508,
            "acc_norm": 0.7696078431372549,
            "acc_norm_stderr": 0.02955429260569508
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.7636363636363637,
            "acc_stderr": 0.03317505930009179,
            "acc_norm": 0.7636363636363637,
            "acc_norm_stderr": 0.03317505930009179
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3843329253365973,
            "mc1_stderr": 0.0170287073012452,
            "mc2": 0.5527618960115636,
            "mc2_stderr": 0.01569377342067013
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5548996458087367,
            "acc_stderr": 0.017086417431005464,
            "acc_norm": 0.5737898465171193,
            "acc_norm_stderr": 0.01700212260948925
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "JY623/KoSOLAR-10.7B-merge-v3.4",
        "model_sha": "82e4b39d0ce8f31db2b61c9f79e4e5cc9eb3d74c",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}