{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.27559726962457337,
            "acc_stderr": 0.01305716965576184,
            "acc_norm": 0.33447098976109213,
            "acc_norm_stderr": 0.013787460322441379
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.36138219478191597,
            "acc_stderr": 0.004794191785967943,
            "acc_norm": 0.45976897032463654,
            "acc_norm_stderr": 0.004973602904247795
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.30409356725146197,
            "acc_stderr": 0.03528211258245231,
            "acc_norm": 0.30409356725146197,
            "acc_norm_stderr": 0.03528211258245231
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.13592233009708737,
            "acc_stderr": 0.03393295729761012,
            "acc_norm": 0.13592233009708737,
            "acc_norm_stderr": 0.03393295729761012
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.22988505747126436,
            "acc_stderr": 0.01504630184669182,
            "acc_norm": 0.22988505747126436,
            "acc_norm_stderr": 0.01504630184669182
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2518518518518518,
            "acc_stderr": 0.03749850709174021,
            "acc_norm": 0.2518518518518518,
            "acc_norm_stderr": 0.03749850709174021
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2978723404255319,
            "acc_stderr": 0.02989614568209546,
            "acc_norm": 0.2978723404255319,
            "acc_norm_stderr": 0.02989614568209546
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.2891566265060241,
            "acc_stderr": 0.03529486801511115,
            "acc_norm": 0.2891566265060241,
            "acc_norm_stderr": 0.03529486801511115
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.26688102893890675,
            "acc_stderr": 0.025122637608816646,
            "acc_norm": 0.26688102893890675,
            "acc_norm_stderr": 0.025122637608816646
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.23318385650224216,
            "acc_stderr": 0.02838039114709472,
            "acc_norm": 0.23318385650224216,
            "acc_norm_stderr": 0.02838039114709472
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.25190839694656486,
            "acc_stderr": 0.03807387116306086,
            "acc_norm": 0.25190839694656486,
            "acc_norm_stderr": 0.03807387116306086
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.20202020202020202,
            "acc_stderr": 0.028606204289229872,
            "acc_norm": 0.20202020202020202,
            "acc_norm_stderr": 0.028606204289229872
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2620689655172414,
            "acc_stderr": 0.03664666337225256,
            "acc_norm": 0.2620689655172414,
            "acc_norm_stderr": 0.03664666337225256
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171453,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171453
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.2815126050420168,
            "acc_stderr": 0.029213549414372174,
            "acc_norm": 0.2815126050420168,
            "acc_norm_stderr": 0.029213549414372174
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.24358974358974358,
            "acc_stderr": 0.021763733684173916,
            "acc_norm": 0.24358974358974358,
            "acc_norm_stderr": 0.021763733684173916
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542129,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542129
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.28703703703703703,
            "acc_stderr": 0.043733130409147614,
            "acc_norm": 0.28703703703703703,
            "acc_norm_stderr": 0.043733130409147614
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.22660098522167488,
            "acc_stderr": 0.029454863835292975,
            "acc_norm": 0.22660098522167488,
            "acc_norm_stderr": 0.029454863835292975
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3,
            "acc_stderr": 0.02606936229533513,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.02606936229533513
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.26495726495726496,
            "acc_stderr": 0.028911208802749475,
            "acc_norm": 0.26495726495726496,
            "acc_norm_stderr": 0.028911208802749475
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.24150943396226415,
            "acc_stderr": 0.026341480371118352,
            "acc_norm": 0.24150943396226415,
            "acc_norm_stderr": 0.026341480371118352
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2636363636363636,
            "acc_stderr": 0.04220224692971987,
            "acc_norm": 0.2636363636363636,
            "acc_norm_stderr": 0.04220224692971987
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.27037037037037037,
            "acc_stderr": 0.027080372815145668,
            "acc_norm": 0.27037037037037037,
            "acc_norm_stderr": 0.027080372815145668
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3973509933774834,
            "acc_stderr": 0.03995524007681681,
            "acc_norm": 0.3973509933774834,
            "acc_norm_stderr": 0.03995524007681681
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.29850746268656714,
            "acc_stderr": 0.03235743789355044,
            "acc_norm": 0.29850746268656714,
            "acc_norm_stderr": 0.03235743789355044
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.21965317919075145,
            "acc_stderr": 0.031568093627031744,
            "acc_norm": 0.21965317919075145,
            "acc_norm_stderr": 0.031568093627031744
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.023068188848261128,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.023068188848261128
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.03476590104304134,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.03476590104304134
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.24277456647398843,
            "acc_stderr": 0.0230836585869842,
            "acc_norm": 0.24277456647398843,
            "acc_norm_stderr": 0.0230836585869842
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.24539877300613497,
            "acc_stderr": 0.03380939813943354,
            "acc_norm": 0.24539877300613497,
            "acc_norm_stderr": 0.03380939813943354
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3055555555555556,
            "acc_stderr": 0.025630824975621348,
            "acc_norm": 0.3055555555555556,
            "acc_norm_stderr": 0.025630824975621348
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.2849740932642487,
            "acc_stderr": 0.032577140777096614,
            "acc_norm": 0.2849740932642487,
            "acc_norm_stderr": 0.032577140777096614
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.042270544512322,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.042270544512322
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.26605504587155965,
            "acc_stderr": 0.018946022322225586,
            "acc_norm": 0.26605504587155965,
            "acc_norm_stderr": 0.018946022322225586
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04216370213557835,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04216370213557835
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.2875816993464052,
            "acc_stderr": 0.02591780611714716,
            "acc_norm": 0.2875816993464052,
            "acc_norm_stderr": 0.02591780611714716
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.34710743801652894,
            "acc_stderr": 0.04345724570292534,
            "acc_norm": 0.34710743801652894,
            "acc_norm_stderr": 0.04345724570292534
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.20394736842105263,
            "acc_stderr": 0.032790004063100495,
            "acc_norm": 0.20394736842105263,
            "acc_norm_stderr": 0.032790004063100495
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2565359477124183,
            "acc_stderr": 0.017667841612379002,
            "acc_norm": 0.2565359477124183,
            "acc_norm_stderr": 0.017667841612379002
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2695035460992908,
            "acc_stderr": 0.02646903681859063,
            "acc_norm": 0.2695035460992908,
            "acc_norm_stderr": 0.02646903681859063
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.03894641120044792,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.03894641120044792
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.030546745264953202,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.030546745264953202
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27262569832402234,
            "acc_stderr": 0.014893391735249608,
            "acc_norm": 0.27262569832402234,
            "acc_norm_stderr": 0.014893391735249608
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.43014705882352944,
            "acc_stderr": 0.030074971917302875,
            "acc_norm": 0.43014705882352944,
            "acc_norm_stderr": 0.030074971917302875
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2816326530612245,
            "acc_stderr": 0.028795185574291293,
            "acc_norm": 0.2816326530612245,
            "acc_norm_stderr": 0.028795185574291293
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.24472573839662448,
            "acc_stderr": 0.027985699387036416,
            "acc_norm": 0.24472573839662448,
            "acc_norm_stderr": 0.027985699387036416
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.24967405475880053,
            "acc_stderr": 0.011054538377832318,
            "acc_norm": 0.24967405475880053,
            "acc_norm_stderr": 0.011054538377832318
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.028867431449849313,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.028867431449849313
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.23636363636363636,
            "acc_stderr": 0.03317505930009179,
            "acc_norm": 0.23636363636363636,
            "acc_norm_stderr": 0.03317505930009179
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2962056303549572,
            "mc1_stderr": 0.015983595101811392,
            "mc2": 0.44106273502355514,
            "mc2_stderr": 0.01484142550203185
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2550177095631641,
            "acc_stderr": 0.01498555953342857,
            "acc_norm": 0.33884297520661155,
            "acc_norm_stderr": 0.016272952997019124
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "ITT-AF/ITT-42dot_LLM-PLM-1.3B-v6.0",
        "model_sha": "cade76911ad628d7812682ec2bb5a8caac484c1a",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}