{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.20563139931740615,
            "acc_stderr": 0.011810745260742585,
            "acc_norm": 0.25853242320819114,
            "acc_norm_stderr": 0.012794553754288666
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.252141007767377,
            "acc_stderr": 0.004333543083293473,
            "acc_norm": 0.24278032264489147,
            "acc_norm_stderr": 0.004278871104930363
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.25146198830409355,
            "acc_stderr": 0.033275044238468436,
            "acc_norm": 0.25146198830409355,
            "acc_norm_stderr": 0.033275044238468436
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.1941747572815534,
            "acc_stderr": 0.03916667762822584,
            "acc_norm": 0.1941747572815534,
            "acc_norm_stderr": 0.03916667762822584
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2720306513409962,
            "acc_stderr": 0.015913367447500517,
            "acc_norm": 0.2720306513409962,
            "acc_norm_stderr": 0.015913367447500517
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.34074074074074073,
            "acc_stderr": 0.040943762699967946,
            "acc_norm": 0.34074074074074073,
            "acc_norm_stderr": 0.040943762699967946
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2127659574468085,
            "acc_stderr": 0.026754391348039787,
            "acc_norm": 0.2127659574468085,
            "acc_norm_stderr": 0.026754391348039787
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.20481927710843373,
            "acc_stderr": 0.03141784291663925,
            "acc_norm": 0.20481927710843373,
            "acc_norm_stderr": 0.03141784291663925
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3086816720257235,
            "acc_stderr": 0.026236965881153266,
            "acc_norm": 0.3086816720257235,
            "acc_norm_stderr": 0.026236965881153266
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.20179372197309417,
            "acc_stderr": 0.026936111912802263,
            "acc_norm": 0.20179372197309417,
            "acc_norm_stderr": 0.026936111912802263
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.22900763358778625,
            "acc_stderr": 0.036853466317118506,
            "acc_norm": 0.22900763358778625,
            "acc_norm_stderr": 0.036853466317118506
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.2474747474747475,
            "acc_stderr": 0.030746300742124495,
            "acc_norm": 0.2474747474747475,
            "acc_norm_stderr": 0.030746300742124495
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.296551724137931,
            "acc_stderr": 0.038061426873099935,
            "acc_norm": 0.296551724137931,
            "acc_norm_stderr": 0.038061426873099935
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.040925639582376556,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.040925639582376556
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.18067226890756302,
            "acc_stderr": 0.024991964966600756,
            "acc_norm": 0.18067226890756302,
            "acc_norm_stderr": 0.024991964966600756
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.022421273612923714,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.022421273612923714
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.31,
            "acc_stderr": 0.046482319871173156,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.046482319871173156
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.21296296296296297,
            "acc_stderr": 0.03957835471980981,
            "acc_norm": 0.21296296296296297,
            "acc_norm_stderr": 0.03957835471980981
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.03178529710642749,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.03178529710642749
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.22258064516129034,
            "acc_stderr": 0.02366421667164252,
            "acc_norm": 0.22258064516129034,
            "acc_norm_stderr": 0.02366421667164252
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.24786324786324787,
            "acc_stderr": 0.028286324075564386,
            "acc_norm": 0.24786324786324787,
            "acc_norm_stderr": 0.028286324075564386
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2188679245283019,
            "acc_stderr": 0.025447863825108597,
            "acc_norm": 0.2188679245283019,
            "acc_norm_stderr": 0.025447863825108597
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.21818181818181817,
            "acc_stderr": 0.03955932861795833,
            "acc_norm": 0.21818181818181817,
            "acc_norm_stderr": 0.03955932861795833
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26296296296296295,
            "acc_stderr": 0.026842057873833706,
            "acc_norm": 0.26296296296296295,
            "acc_norm_stderr": 0.026842057873833706
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.03734535676787198,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.03734535676787198
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.2537313432835821,
            "acc_stderr": 0.03076944496729602,
            "acc_norm": 0.2537313432835821,
            "acc_norm_stderr": 0.03076944496729602
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.24277456647398843,
            "acc_stderr": 0.0326926380614177,
            "acc_norm": 0.24277456647398843,
            "acc_norm_stderr": 0.0326926380614177
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.24074074074074073,
            "acc_stderr": 0.022019080012217897,
            "acc_norm": 0.24074074074074073,
            "acc_norm_stderr": 0.022019080012217897
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2569444444444444,
            "acc_stderr": 0.03653946969442099,
            "acc_norm": 0.2569444444444444,
            "acc_norm_stderr": 0.03653946969442099
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.18,
            "acc_stderr": 0.03861229196653694,
            "acc_norm": 0.18,
            "acc_norm_stderr": 0.03861229196653694
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.2745664739884393,
            "acc_stderr": 0.024027745155265026,
            "acc_norm": 0.2745664739884393,
            "acc_norm_stderr": 0.024027745155265026
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.2883435582822086,
            "acc_stderr": 0.035590395316173425,
            "acc_norm": 0.2883435582822086,
            "acc_norm_stderr": 0.035590395316173425
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.27469135802469136,
            "acc_stderr": 0.024836057868294688,
            "acc_norm": 0.27469135802469136,
            "acc_norm_stderr": 0.024836057868294688
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.22,
            "acc_stderr": 0.041633319989322695,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.041633319989322695
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.20725388601036268,
            "acc_stderr": 0.02925282329180363,
            "acc_norm": 0.20725388601036268,
            "acc_norm_stderr": 0.02925282329180363
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03999423879281336,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03999423879281336
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.20550458715596331,
            "acc_stderr": 0.01732435232501601,
            "acc_norm": 0.20550458715596331,
            "acc_norm_stderr": 0.01732435232501601
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.03670066451047181,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.03670066451047181
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.024288619466046095,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.024288619466046095
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.22,
            "acc_stderr": 0.04163331998932267,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.04163331998932267
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.3140495867768595,
            "acc_stderr": 0.04236964753041019,
            "acc_norm": 0.3140495867768595,
            "acc_norm_stderr": 0.04236964753041019
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.29605263157894735,
            "acc_stderr": 0.03715062154998905,
            "acc_norm": 0.29605263157894735,
            "acc_norm_stderr": 0.03715062154998905
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.27941176470588236,
            "acc_stderr": 0.01815287105153882,
            "acc_norm": 0.27941176470588236,
            "acc_norm_stderr": 0.01815287105153882
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.26595744680851063,
            "acc_stderr": 0.02635806569888059,
            "acc_norm": 0.26595744680851063,
            "acc_norm_stderr": 0.02635806569888059
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25,
            "acc_stderr": 0.04109974682633932,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04109974682633932
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.028353212866863445,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.028353212866863445
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2446927374301676,
            "acc_stderr": 0.014378169884098426,
            "acc_norm": 0.2446927374301676,
            "acc_norm_stderr": 0.014378169884098426
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.1875,
            "acc_stderr": 0.023709788253811766,
            "acc_norm": 0.1875,
            "acc_norm_stderr": 0.023709788253811766
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.22040816326530613,
            "acc_stderr": 0.026537045312145294,
            "acc_norm": 0.22040816326530613,
            "acc_norm_stderr": 0.026537045312145294
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2742616033755274,
            "acc_stderr": 0.029041333510598035,
            "acc_norm": 0.2742616033755274,
            "acc_norm_stderr": 0.029041333510598035
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.27053455019556716,
            "acc_stderr": 0.01134599674353926,
            "acc_norm": 0.27053455019556716,
            "acc_norm_stderr": 0.01134599674353926
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.28921568627450983,
            "acc_stderr": 0.03182231867647554,
            "acc_norm": 0.28921568627450983,
            "acc_norm_stderr": 0.03182231867647554
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2545454545454545,
            "acc_stderr": 0.03401506715249039,
            "acc_norm": 0.2545454545454545,
            "acc_norm_stderr": 0.03401506715249039
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.21909424724602203,
            "mc1_stderr": 0.014480038578757447,
            "mc2": NaN,
            "mc2_stderr": NaN
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.0892018779342723,
            "acc_stderr": 0.009770871054319058,
            "acc_norm": 0.22300469483568075,
            "acc_norm_stderr": 0.014269258984221392
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "yeen214/llama2_7b_small_tuning_v1",
        "model_sha": "3f9b43b4db2da4fe3785071dd52c9fc92aa0801d",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}