{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.18430034129692832,
            "acc_stderr": 0.011330517933037411,
            "acc_norm": 0.21160409556313994,
            "acc_norm_stderr": 0.011935916358632857
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.29247161919936265,
            "acc_stderr": 0.004539680764142175,
            "acc_norm": 0.32354112726548495,
            "acc_norm_stderr": 0.004668710689192412
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.19298245614035087,
            "acc_stderr": 0.03026745755489847,
            "acc_norm": 0.19298245614035087,
            "acc_norm_stderr": 0.03026745755489847
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.1553398058252427,
            "acc_stderr": 0.03586594738573975,
            "acc_norm": 0.1553398058252427,
            "acc_norm_stderr": 0.03586594738573975
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.26436781609195403,
            "acc_stderr": 0.01576998484069052,
            "acc_norm": 0.26436781609195403,
            "acc_norm_stderr": 0.01576998484069052
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.21481481481481482,
            "acc_stderr": 0.035478541985608264,
            "acc_norm": 0.21481481481481482,
            "acc_norm_stderr": 0.035478541985608264
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.23,
            "acc_stderr": 0.042295258468165065,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.042295258468165065
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2765957446808511,
            "acc_stderr": 0.029241883869628817,
            "acc_norm": 0.2765957446808511,
            "acc_norm_stderr": 0.029241883869628817
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.28313253012048195,
            "acc_stderr": 0.03507295431370519,
            "acc_norm": 0.28313253012048195,
            "acc_norm_stderr": 0.03507295431370519
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.26688102893890675,
            "acc_stderr": 0.025122637608816646,
            "acc_norm": 0.26688102893890675,
            "acc_norm_stderr": 0.025122637608816646
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.30493273542600896,
            "acc_stderr": 0.030898610882477515,
            "acc_norm": 0.30493273542600896,
            "acc_norm_stderr": 0.030898610882477515
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.26717557251908397,
            "acc_stderr": 0.03880848301082396,
            "acc_norm": 0.26717557251908397,
            "acc_norm_stderr": 0.03880848301082396
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.31313131313131315,
            "acc_stderr": 0.033042050878136525,
            "acc_norm": 0.31313131313131315,
            "acc_norm_stderr": 0.033042050878136525
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3103448275862069,
            "acc_stderr": 0.03855289616378948,
            "acc_norm": 0.3103448275862069,
            "acc_norm_stderr": 0.03855289616378948
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.19607843137254902,
            "acc_stderr": 0.03950581861179962,
            "acc_norm": 0.19607843137254902,
            "acc_norm_stderr": 0.03950581861179962
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3445378151260504,
            "acc_stderr": 0.030868682604121622,
            "acc_norm": 0.3445378151260504,
            "acc_norm_stderr": 0.030868682604121622
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.02390115797940252,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.02390115797940252
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816505,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816505
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.23148148148148148,
            "acc_stderr": 0.04077494709252626,
            "acc_norm": 0.23148148148148148,
            "acc_norm_stderr": 0.04077494709252626
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.21674876847290642,
            "acc_stderr": 0.028990331252516235,
            "acc_norm": 0.21674876847290642,
            "acc_norm_stderr": 0.028990331252516235
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3193548387096774,
            "acc_stderr": 0.026522709674667765,
            "acc_norm": 0.3193548387096774,
            "acc_norm_stderr": 0.026522709674667765
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.19658119658119658,
            "acc_stderr": 0.02603538609895129,
            "acc_norm": 0.19658119658119658,
            "acc_norm_stderr": 0.02603538609895129
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.20754716981132076,
            "acc_stderr": 0.02495991802891127,
            "acc_norm": 0.20754716981132076,
            "acc_norm_stderr": 0.02495991802891127
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.23636363636363636,
            "acc_stderr": 0.04069306319721376,
            "acc_norm": 0.23636363636363636,
            "acc_norm_stderr": 0.04069306319721376
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26296296296296295,
            "acc_stderr": 0.02684205787383371,
            "acc_norm": 0.26296296296296295,
            "acc_norm_stderr": 0.02684205787383371
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.32450331125827814,
            "acc_stderr": 0.03822746937658753,
            "acc_norm": 0.32450331125827814,
            "acc_norm_stderr": 0.03822746937658753
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.2935323383084577,
            "acc_stderr": 0.03220024104534205,
            "acc_norm": 0.2935323383084577,
            "acc_norm_stderr": 0.03220024104534205
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2658959537572254,
            "acc_stderr": 0.03368762932259431,
            "acc_norm": 0.2658959537572254,
            "acc_norm_stderr": 0.03368762932259431
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.24603174603174602,
            "acc_stderr": 0.022182037202948368,
            "acc_norm": 0.24603174603174602,
            "acc_norm_stderr": 0.022182037202948368
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2638888888888889,
            "acc_stderr": 0.03685651095897532,
            "acc_norm": 0.2638888888888889,
            "acc_norm_stderr": 0.03685651095897532
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.23,
            "acc_stderr": 0.042295258468165065,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.042295258468165065
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720683,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720683
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.24855491329479767,
            "acc_stderr": 0.023267528432100174,
            "acc_norm": 0.24855491329479767,
            "acc_norm_stderr": 0.023267528432100174
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.294478527607362,
            "acc_stderr": 0.03581165790474082,
            "acc_norm": 0.294478527607362,
            "acc_norm_stderr": 0.03581165790474082
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.25,
            "acc_stderr": 0.02409347123262133,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.02409347123262133
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.2538860103626943,
            "acc_stderr": 0.03141024780565319,
            "acc_norm": 0.2538860103626943,
            "acc_norm_stderr": 0.03141024780565319
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.21929824561403508,
            "acc_stderr": 0.03892431106518753,
            "acc_norm": 0.21929824561403508,
            "acc_norm_stderr": 0.03892431106518753
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.30275229357798167,
            "acc_stderr": 0.01969871143475635,
            "acc_norm": 0.30275229357798167,
            "acc_norm_stderr": 0.01969871143475635
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.23809523809523808,
            "acc_stderr": 0.038095238095238106,
            "acc_norm": 0.23809523809523808,
            "acc_norm_stderr": 0.038095238095238106
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.27124183006535946,
            "acc_stderr": 0.025457756696667878,
            "acc_norm": 0.27124183006535946,
            "acc_norm_stderr": 0.025457756696667878
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.23,
            "acc_stderr": 0.042295258468165065,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.042295258468165065
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.2975206611570248,
            "acc_stderr": 0.04173349148083499,
            "acc_norm": 0.2975206611570248,
            "acc_norm_stderr": 0.04173349148083499
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03459777606810537,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03459777606810537
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.21895424836601307,
            "acc_stderr": 0.016729937565537537,
            "acc_norm": 0.21895424836601307,
            "acc_norm_stderr": 0.016729937565537537
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.23404255319148937,
            "acc_stderr": 0.02525786135943241,
            "acc_norm": 0.23404255319148937,
            "acc_norm_stderr": 0.02525786135943241
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.29464285714285715,
            "acc_stderr": 0.04327040932578728,
            "acc_norm": 0.29464285714285715,
            "acc_norm_stderr": 0.04327040932578728
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4537037037037037,
            "acc_stderr": 0.033953227263757976,
            "acc_norm": 0.4537037037037037,
            "acc_norm_stderr": 0.033953227263757976
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27262569832402234,
            "acc_stderr": 0.014893391735249608,
            "acc_norm": 0.27262569832402234,
            "acc_norm_stderr": 0.014893391735249608
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4522058823529412,
            "acc_stderr": 0.030233758551596452,
            "acc_norm": 0.4522058823529412,
            "acc_norm_stderr": 0.030233758551596452
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.31020408163265306,
            "acc_stderr": 0.029613459872484378,
            "acc_norm": 0.31020408163265306,
            "acc_norm_stderr": 0.029613459872484378
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.27848101265822783,
            "acc_stderr": 0.029178682304842555,
            "acc_norm": 0.27848101265822783,
            "acc_norm_stderr": 0.029178682304842555
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2561929595827901,
            "acc_stderr": 0.01114917315311058,
            "acc_norm": 0.2561929595827901,
            "acc_norm_stderr": 0.01114917315311058
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.25980392156862747,
            "acc_stderr": 0.030778554678693275,
            "acc_norm": 0.25980392156862747,
            "acc_norm_stderr": 0.030778554678693275
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.21818181818181817,
            "acc_stderr": 0.03225078108306289,
            "acc_norm": 0.21818181818181817,
            "acc_norm_stderr": 0.03225078108306289
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.25458996328029376,
            "mc1_stderr": 0.015250117079156479,
            "mc2": 0.4518199326851351,
            "mc2_stderr": 0.016577184952558062
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.256198347107438,
            "acc_stderr": 0.015008301644712976,
            "acc_norm": 0.3577331759149941,
            "acc_norm_stderr": 0.01647980893574998
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Alphacode-AI-Team/Alpha-LLM-Mistral7B-v4",
        "model_sha": "99b2981a5fb3f2171396eeddc8833e6bf992a46c",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}