{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3771331058020478,
            "acc_stderr": 0.0141633668961926,
            "acc_norm": 0.4351535836177474,
            "acc_norm_stderr": 0.01448798619718605
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4151563433578968,
            "acc_stderr": 0.004917419367766031,
            "acc_norm": 0.5669189404501095,
            "acc_norm_stderr": 0.004944889545497955
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.49122807017543857,
            "acc_stderr": 0.038342347441649924,
            "acc_norm": 0.49122807017543857,
            "acc_norm_stderr": 0.038342347441649924
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5242718446601942,
            "acc_stderr": 0.049449010929737795,
            "acc_norm": 0.5242718446601942,
            "acc_norm_stderr": 0.049449010929737795
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5491698595146871,
            "acc_stderr": 0.017793297572699034,
            "acc_norm": 0.5491698595146871,
            "acc_norm_stderr": 0.017793297572699034
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.5111111111111111,
            "acc_stderr": 0.04318275491977976,
            "acc_norm": 0.5111111111111111,
            "acc_norm_stderr": 0.04318275491977976
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.39148936170212767,
            "acc_stderr": 0.03190701242326812,
            "acc_norm": 0.39148936170212767,
            "acc_norm_stderr": 0.03190701242326812
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3855421686746988,
            "acc_stderr": 0.037891344246115476,
            "acc_norm": 0.3855421686746988,
            "acc_norm_stderr": 0.037891344246115476
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5337620578778135,
            "acc_stderr": 0.028333277109562804,
            "acc_norm": 0.5337620578778135,
            "acc_norm_stderr": 0.028333277109562804
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5067264573991032,
            "acc_stderr": 0.03355476596234354,
            "acc_norm": 0.5067264573991032,
            "acc_norm_stderr": 0.03355476596234354
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5038167938931297,
            "acc_stderr": 0.04385162325601553,
            "acc_norm": 0.5038167938931297,
            "acc_norm_stderr": 0.04385162325601553
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5858585858585859,
            "acc_stderr": 0.03509438348879629,
            "acc_norm": 0.5858585858585859,
            "acc_norm_stderr": 0.03509438348879629
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.41379310344827586,
            "acc_stderr": 0.04104269211806232,
            "acc_norm": 0.41379310344827586,
            "acc_norm_stderr": 0.04104269211806232
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237655,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237655
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4579831932773109,
            "acc_stderr": 0.03236361111951941,
            "acc_norm": 0.4579831932773109,
            "acc_norm_stderr": 0.03236361111951941
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4358974358974359,
            "acc_stderr": 0.025141801511177498,
            "acc_norm": 0.4358974358974359,
            "acc_norm_stderr": 0.025141801511177498
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4351851851851852,
            "acc_stderr": 0.04792898170907061,
            "acc_norm": 0.4351851851851852,
            "acc_norm_stderr": 0.04792898170907061
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.39408866995073893,
            "acc_stderr": 0.034381579670365446,
            "acc_norm": 0.39408866995073893,
            "acc_norm_stderr": 0.034381579670365446
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.47419354838709676,
            "acc_stderr": 0.02840609505765332,
            "acc_norm": 0.47419354838709676,
            "acc_norm_stderr": 0.02840609505765332
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6581196581196581,
            "acc_stderr": 0.03107502852650775,
            "acc_norm": 0.6581196581196581,
            "acc_norm_stderr": 0.03107502852650775
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4188679245283019,
            "acc_stderr": 0.030365050829115208,
            "acc_norm": 0.4188679245283019,
            "acc_norm_stderr": 0.030365050829115208
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4909090909090909,
            "acc_stderr": 0.04788339768702861,
            "acc_norm": 0.4909090909090909,
            "acc_norm_stderr": 0.04788339768702861
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.025348097468097856,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.025348097468097856
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.32450331125827814,
            "acc_stderr": 0.038227469376587525,
            "acc_norm": 0.32450331125827814,
            "acc_norm_stderr": 0.038227469376587525
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5472636815920398,
            "acc_stderr": 0.03519702717576915,
            "acc_norm": 0.5472636815920398,
            "acc_norm_stderr": 0.03519702717576915
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4161849710982659,
            "acc_stderr": 0.03758517775404947,
            "acc_norm": 0.4161849710982659,
            "acc_norm_stderr": 0.03758517775404947
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.29365079365079366,
            "acc_stderr": 0.02345603738398202,
            "acc_norm": 0.29365079365079366,
            "acc_norm_stderr": 0.02345603738398202
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4027777777777778,
            "acc_stderr": 0.04101405519842426,
            "acc_norm": 0.4027777777777778,
            "acc_norm_stderr": 0.04101405519842426
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5144508670520231,
            "acc_stderr": 0.026907849856282532,
            "acc_norm": 0.5144508670520231,
            "acc_norm_stderr": 0.026907849856282532
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4723926380368098,
            "acc_stderr": 0.039223782906109894,
            "acc_norm": 0.4723926380368098,
            "acc_norm_stderr": 0.039223782906109894
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4876543209876543,
            "acc_stderr": 0.027812262269327235,
            "acc_norm": 0.4876543209876543,
            "acc_norm_stderr": 0.027812262269327235
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.04605661864718381,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.04605661864718381
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5284974093264249,
            "acc_stderr": 0.036025735712884414,
            "acc_norm": 0.5284974093264249,
            "acc_norm_stderr": 0.036025735712884414
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03947152782669415,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03947152782669415
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5596330275229358,
            "acc_stderr": 0.021284310623761536,
            "acc_norm": 0.5596330275229358,
            "acc_norm_stderr": 0.021284310623761536
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30952380952380953,
            "acc_stderr": 0.04134913018303316,
            "acc_norm": 0.30952380952380953,
            "acc_norm_stderr": 0.04134913018303316
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.46078431372549017,
            "acc_stderr": 0.028541722692618874,
            "acc_norm": 0.46078431372549017,
            "acc_norm_stderr": 0.028541722692618874
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6363636363636364,
            "acc_stderr": 0.043913262867240704,
            "acc_norm": 0.6363636363636364,
            "acc_norm_stderr": 0.043913262867240704
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.40131578947368424,
            "acc_stderr": 0.03988903703336284,
            "acc_norm": 0.40131578947368424,
            "acc_norm_stderr": 0.03988903703336284
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4035947712418301,
            "acc_stderr": 0.019848280168401147,
            "acc_norm": 0.4035947712418301,
            "acc_norm_stderr": 0.019848280168401147
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.30851063829787234,
            "acc_stderr": 0.027553366165101376,
            "acc_norm": 0.30851063829787234,
            "acc_norm_stderr": 0.027553366165101376
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.23214285714285715,
            "acc_stderr": 0.04007341809755806,
            "acc_norm": 0.23214285714285715,
            "acc_norm_stderr": 0.04007341809755806
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.032757734861009996,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.032757734861009996
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2435754189944134,
            "acc_stderr": 0.014355911964767857,
            "acc_norm": 0.2435754189944134,
            "acc_norm_stderr": 0.014355911964767857
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.45,
            "acc_stderr": 0.049999999999999996,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.049999999999999996
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4338235294117647,
            "acc_stderr": 0.030105636570016643,
            "acc_norm": 0.4338235294117647,
            "acc_norm_stderr": 0.030105636570016643
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.46530612244897956,
            "acc_stderr": 0.03193207024425314,
            "acc_norm": 0.46530612244897956,
            "acc_norm_stderr": 0.03193207024425314
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5822784810126582,
            "acc_stderr": 0.032103530322412685,
            "acc_norm": 0.5822784810126582,
            "acc_norm_stderr": 0.032103530322412685
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.34615384615384615,
            "acc_stderr": 0.012150699768228563,
            "acc_norm": 0.34615384615384615,
            "acc_norm_stderr": 0.012150699768228563
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.46568627450980393,
            "acc_stderr": 0.03501038327635897,
            "acc_norm": 0.46568627450980393,
            "acc_norm_stderr": 0.03501038327635897
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5515151515151515,
            "acc_stderr": 0.038835659779569286,
            "acc_norm": 0.5515151515151515,
            "acc_norm_stderr": 0.038835659779569286
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.24724602203182375,
            "mc1_stderr": 0.015102404797359649,
            "mc2": 0.39725650408832863,
            "mc2_stderr": 0.01469261681765968
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4179456906729634,
            "acc_stderr": 0.016957292005279713,
            "acc_norm": 0.4817001180637544,
            "acc_norm_stderr": 0.01717883663917776
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "AIFT/aift-llama2-koen-instruct-v1.0",
        "model_sha": "54a5a30188cba6af653f20df22ff393472f0e161",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}