{
  "results": {
    "harness|ko_arc_challenge|25": {
      "acc": 0.3455631399317406,
      "acc_stderr": 0.01389693846114568,
      "acc_norm": 0.3890784982935154,
      "acc_norm_stderr": 0.014247309976045605
    },
    "harness|ko_hellaswag|10": {
      "acc": 0.3761202947619996,
      "acc_stderr": 0.004834207964061324,
      "acc_norm": 0.48834893447520417,
      "acc_norm_stderr": 0.004988426528513012
    },
    "harness|ko_mmlu_world_religions|5": {
      "acc": 0.5087719298245614,
      "acc_stderr": 0.03834234744164993,
      "acc_norm": 0.5087719298245614,
      "acc_norm_stderr": 0.03834234744164993
    },
    "harness|ko_mmlu_management|5": {
      "acc": 0.5922330097087378,
      "acc_stderr": 0.048657775704107675,
      "acc_norm": 0.5922330097087378,
      "acc_norm_stderr": 0.048657775704107675
    },
    "harness|ko_mmlu_miscellaneous|5": {
      "acc": 0.46871008939974457,
      "acc_stderr": 0.017844918090468544,
      "acc_norm": 0.46871008939974457,
      "acc_norm_stderr": 0.017844918090468544
    },
    "harness|ko_mmlu_anatomy|5": {
      "acc": 0.4222222222222222,
      "acc_stderr": 0.04266763404099582,
      "acc_norm": 0.4222222222222222,
      "acc_norm_stderr": 0.04266763404099582
    },
    "harness|ko_mmlu_abstract_algebra|5": {
      "acc": 0.23,
      "acc_stderr": 0.04229525846816506,
      "acc_norm": 0.23,
      "acc_norm_stderr": 0.04229525846816506
    },
    "harness|ko_mmlu_conceptual_physics|5": {
      "acc": 0.42127659574468085,
      "acc_stderr": 0.03227834510146267,
      "acc_norm": 0.42127659574468085,
      "acc_norm_stderr": 0.03227834510146267
    },
    "harness|ko_mmlu_virology|5": {
      "acc": 0.39759036144578314,
      "acc_stderr": 0.03809973084540219,
      "acc_norm": 0.39759036144578314,
      "acc_norm_stderr": 0.03809973084540219
    },
    "harness|ko_mmlu_philosophy|5": {
      "acc": 0.4983922829581994,
      "acc_stderr": 0.02839794490780661,
      "acc_norm": 0.4983922829581994,
      "acc_norm_stderr": 0.02839794490780661
    },
    "harness|ko_mmlu_human_aging|5": {
      "acc": 0.4618834080717489,
      "acc_stderr": 0.033460150119732274,
      "acc_norm": 0.4618834080717489,
      "acc_norm_stderr": 0.033460150119732274
    },
    "harness|ko_mmlu_human_sexuality|5": {
      "acc": 0.46564885496183206,
      "acc_stderr": 0.04374928560599738,
      "acc_norm": 0.46564885496183206,
      "acc_norm_stderr": 0.04374928560599738
    },
    "harness|ko_mmlu_medical_genetics|5": {
      "acc": 0.39,
      "acc_stderr": 0.04902071300001974,
      "acc_norm": 0.39,
      "acc_norm_stderr": 0.04902071300001974
    },
    "harness|ko_mmlu_high_school_geography|5": {
      "acc": 0.5404040404040404,
      "acc_stderr": 0.035507024651313425,
      "acc_norm": 0.5404040404040404,
      "acc_norm_stderr": 0.035507024651313425
    },
    "harness|ko_mmlu_electrical_engineering|5": {
      "acc": 0.45517241379310347,
      "acc_stderr": 0.04149886942192117,
      "acc_norm": 0.45517241379310347,
      "acc_norm_stderr": 0.04149886942192117
    },
    "harness|ko_mmlu_college_physics|5": {
      "acc": 0.23529411764705882,
      "acc_stderr": 0.04220773659171452,
      "acc_norm": 0.23529411764705882,
      "acc_norm_stderr": 0.04220773659171452
    },
    "harness|ko_mmlu_high_school_microeconomics|5": {
      "acc": 0.5462184873949579,
      "acc_stderr": 0.03233943468182088,
      "acc_norm": 0.5462184873949579,
      "acc_norm_stderr": 0.03233943468182088
    },
    "harness|ko_mmlu_high_school_macroeconomics|5": {
      "acc": 0.4641025641025641,
      "acc_stderr": 0.025285585990017834,
      "acc_norm": 0.4641025641025641,
      "acc_norm_stderr": 0.025285585990017834
    },
    "harness|ko_mmlu_computer_security|5": {
      "acc": 0.58,
      "acc_stderr": 0.04960449637488583,
      "acc_norm": 0.58,
      "acc_norm_stderr": 0.04960449637488583
    },
    "harness|ko_mmlu_global_facts|5": {
      "acc": 0.33,
      "acc_stderr": 0.04725815626252604,
      "acc_norm": 0.33,
      "acc_norm_stderr": 0.04725815626252604
    },
    "harness|ko_mmlu_jurisprudence|5": {
      "acc": 0.5,
      "acc_stderr": 0.04833682445228318,
      "acc_norm": 0.5,
      "acc_norm_stderr": 0.04833682445228318
    },
    "harness|ko_mmlu_high_school_chemistry|5": {
      "acc": 0.43349753694581283,
      "acc_stderr": 0.034867317274198714,
      "acc_norm": 0.43349753694581283,
      "acc_norm_stderr": 0.034867317274198714
    },
    "harness|ko_mmlu_high_school_biology|5": {
      "acc": 0.49032258064516127,
      "acc_stderr": 0.02843867799890955,
      "acc_norm": 0.49032258064516127,
      "acc_norm_stderr": 0.02843867799890955
    },
    "harness|ko_mmlu_marketing|5": {
      "acc": 0.7393162393162394,
      "acc_stderr": 0.028760348956523414,
      "acc_norm": 0.7393162393162394,
      "acc_norm_stderr": 0.028760348956523414
    },
    "harness|ko_mmlu_clinical_knowledge|5": {
      "acc": 0.44150943396226416,
      "acc_stderr": 0.030561590426731833,
      "acc_norm": 0.44150943396226416,
      "acc_norm_stderr": 0.030561590426731833
    },
    "harness|ko_mmlu_public_relations|5": {
      "acc": 0.5272727272727272,
      "acc_stderr": 0.04782001791380061,
      "acc_norm": 0.5272727272727272,
      "acc_norm_stderr": 0.04782001791380061
    },
    "harness|ko_mmlu_high_school_mathematics|5": {
      "acc": 0.337037037037037,
      "acc_stderr": 0.028820884666253255,
      "acc_norm": 0.337037037037037,
      "acc_norm_stderr": 0.028820884666253255
    },
    "harness|ko_mmlu_high_school_physics|5": {
      "acc": 0.271523178807947,
      "acc_stderr": 0.03631329803969653,
      "acc_norm": 0.271523178807947,
      "acc_norm_stderr": 0.03631329803969653
    },
    "harness|ko_mmlu_sociology|5": {
      "acc": 0.5920398009950248,
      "acc_stderr": 0.03475116365194092,
      "acc_norm": 0.5920398009950248,
      "acc_norm_stderr": 0.03475116365194092
    },
    "harness|ko_mmlu_college_medicine|5": {
      "acc": 0.3988439306358382,
      "acc_stderr": 0.03733626655383509,
      "acc_norm": 0.3988439306358382,
      "acc_norm_stderr": 0.03733626655383509
    },
    "harness|ko_mmlu_elementary_mathematics|5": {
      "acc": 0.3835978835978836,
      "acc_stderr": 0.025043757318520193,
      "acc_norm": 0.3835978835978836,
      "acc_norm_stderr": 0.025043757318520193
    },
    "harness|ko_mmlu_college_biology|5": {
      "acc": 0.3541666666666667,
      "acc_stderr": 0.039994111357535424,
      "acc_norm": 0.3541666666666667,
      "acc_norm_stderr": 0.039994111357535424
    },
    "harness|ko_mmlu_college_chemistry|5": {
      "acc": 0.4,
      "acc_stderr": 0.04923659639173309,
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.04923659639173309
    },
    "harness|ko_mmlu_us_foreign_policy|5": {
      "acc": 0.57,
      "acc_stderr": 0.04975698519562426,
      "acc_norm": 0.57,
      "acc_norm_stderr": 0.04975698519562426
    },
    "harness|ko_mmlu_moral_disputes|5": {
      "acc": 0.5549132947976878,
      "acc_stderr": 0.02675625512966377,
      "acc_norm": 0.5549132947976878,
      "acc_norm_stderr": 0.02675625512966377
    },
    "harness|ko_mmlu_logical_fallacies|5": {
      "acc": 0.49693251533742333,
      "acc_stderr": 0.03928297078179662,
      "acc_norm": 0.49693251533742333,
      "acc_norm_stderr": 0.03928297078179662
    },
    "harness|ko_mmlu_prehistory|5": {
      "acc": 0.4567901234567901,
      "acc_stderr": 0.027716661650194038,
      "acc_norm": 0.4567901234567901,
      "acc_norm_stderr": 0.027716661650194038
    },
    "harness|ko_mmlu_college_mathematics|5": {
      "acc": 0.34,
      "acc_stderr": 0.04760952285695235,
      "acc_norm": 0.34,
      "acc_norm_stderr": 0.04760952285695235
    },
    "harness|ko_mmlu_high_school_government_and_politics|5": {
      "acc": 0.5440414507772021,
      "acc_stderr": 0.035944137112724366,
      "acc_norm": 0.5440414507772021,
      "acc_norm_stderr": 0.035944137112724366
    },
    "harness|ko_mmlu_econometrics|5": {
      "acc": 0.30701754385964913,
      "acc_stderr": 0.043391383225798594,
      "acc_norm": 0.30701754385964913,
      "acc_norm_stderr": 0.043391383225798594
    },
    "harness|ko_mmlu_high_school_psychology|5": {
      "acc": 0.4990825688073395,
      "acc_stderr": 0.021437287056051215,
      "acc_norm": 0.4990825688073395,
      "acc_norm_stderr": 0.021437287056051215
    },
    "harness|ko_mmlu_formal_logic|5": {
      "acc": 0.3968253968253968,
      "acc_stderr": 0.04375888492727061,
      "acc_norm": 0.3968253968253968,
      "acc_norm_stderr": 0.04375888492727061
    },
    "harness|ko_mmlu_nutrition|5": {
      "acc": 0.46078431372549017,
      "acc_stderr": 0.028541722692618874,
      "acc_norm": 0.46078431372549017,
      "acc_norm_stderr": 0.028541722692618874
    },
    "harness|ko_mmlu_business_ethics|5": {
      "acc": 0.45,
      "acc_stderr": 0.049999999999999996,
      "acc_norm": 0.45,
      "acc_norm_stderr": 0.049999999999999996
    },
    "harness|ko_mmlu_international_law|5": {
      "acc": 0.6776859504132231,
      "acc_stderr": 0.042664163633521685,
      "acc_norm": 0.6776859504132231,
      "acc_norm_stderr": 0.042664163633521685
    },
    "harness|ko_mmlu_astronomy|5": {
      "acc": 0.4276315789473684,
      "acc_stderr": 0.040260970832965585,
      "acc_norm": 0.4276315789473684,
      "acc_norm_stderr": 0.040260970832965585
    },
    "harness|ko_mmlu_professional_psychology|5": {
      "acc": 0.40522875816993464,
      "acc_stderr": 0.019861155193829156,
      "acc_norm": 0.40522875816993464,
      "acc_norm_stderr": 0.019861155193829156
    },
    "harness|ko_mmlu_professional_accounting|5": {
      "acc": 0.35106382978723405,
      "acc_stderr": 0.028473501272963775,
      "acc_norm": 0.35106382978723405,
      "acc_norm_stderr": 0.028473501272963775
    },
    "harness|ko_mmlu_machine_learning|5": {
      "acc": 0.41964285714285715,
      "acc_stderr": 0.04684099321077106,
      "acc_norm": 0.41964285714285715,
      "acc_norm_stderr": 0.04684099321077106
    },
    "harness|ko_mmlu_high_school_statistics|5": {
      "acc": 0.39814814814814814,
      "acc_stderr": 0.033384734032074016,
      "acc_norm": 0.39814814814814814,
      "acc_norm_stderr": 0.033384734032074016
    },
    "harness|ko_mmlu_moral_scenarios|5": {
      "acc": 0.34413407821229053,
      "acc_stderr": 0.015889221313307094,
      "acc_norm": 0.34413407821229053,
      "acc_norm_stderr": 0.015889221313307094
    },
    "harness|ko_mmlu_college_computer_science|5": {
      "acc": 0.43,
      "acc_stderr": 0.049756985195624284,
      "acc_norm": 0.43,
      "acc_norm_stderr": 0.049756985195624284
    },
    "harness|ko_mmlu_high_school_computer_science|5": {
      "acc": 0.6,
      "acc_stderr": 0.04923659639173309,
      "acc_norm": 0.6,
      "acc_norm_stderr": 0.04923659639173309
    },
    "harness|ko_mmlu_professional_medicine|5": {
      "acc": 0.4411764705882353,
      "acc_stderr": 0.030161911930767105,
      "acc_norm": 0.4411764705882353,
      "acc_norm_stderr": 0.030161911930767105
    },
    "harness|ko_mmlu_security_studies|5": {
      "acc": 0.5551020408163265,
      "acc_stderr": 0.031814251181977865,
      "acc_norm": 0.5551020408163265,
      "acc_norm_stderr": 0.031814251181977865
    },
    "harness|ko_mmlu_high_school_world_history|5": {
      "acc": 0.5991561181434599,
      "acc_stderr": 0.031900803894732356,
      "acc_norm": 0.5991561181434599,
      "acc_norm_stderr": 0.031900803894732356
    },
    "harness|ko_mmlu_professional_law|5": {
      "acc": 0.3363754889178618,
      "acc_stderr": 0.012067083079452225,
      "acc_norm": 0.3363754889178618,
      "acc_norm_stderr": 0.012067083079452225
    },
    "harness|ko_mmlu_high_school_us_history|5": {
      "acc": 0.4803921568627451,
      "acc_stderr": 0.03506612560524866,
      "acc_norm": 0.4803921568627451,
      "acc_norm_stderr": 0.03506612560524866
    },
    "harness|ko_mmlu_high_school_european_history|5": {
      "acc": 0.4909090909090909,
      "acc_stderr": 0.0390369864774844,
      "acc_norm": 0.4909090909090909,
      "acc_norm_stderr": 0.0390369864774844
    },
    "harness|ko_truthfulqa_mc|0": {
      "mc1": 0.2974296205630355,
      "mc1_stderr": 0.01600265148736101,
      "mc2": 0.47256825783555356,
      "mc2_stderr": 0.015562189062650065
    },
    "harness|ko_commongen_v2|2": {
      "acc": 0.5171192443919717,
      "acc_stderr": 0.01718027524608563,
      "acc_norm": 0.5525383707201889,
      "acc_norm_stderr": 0.017095190301500578
    }
  },
  "versions": {
    "all": 0,
    "harness|ko_arc_challenge|25": 0,
    "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1,
    "harness|ko_mmlu_management|5": 1,
    "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1,
    "harness|ko_mmlu_abstract_algebra|5": 1,
    "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1,
    "harness|ko_mmlu_philosophy|5": 1,
    "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1,
    "harness|ko_mmlu_medical_genetics|5": 1,
    "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1,
    "harness|ko_mmlu_college_physics|5": 1,
    "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
    "harness|ko_mmlu_computer_security|5": 1,
    "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1,
    "harness|ko_mmlu_high_school_chemistry|5": 1,
    "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1,
    "harness|ko_mmlu_clinical_knowledge|5": 1,
    "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1,
    "harness|ko_mmlu_high_school_physics|5": 1,
    "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1,
    "harness|ko_mmlu_elementary_mathematics|5": 1,
    "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1,
    "harness|ko_mmlu_us_foreign_policy|5": 1,
    "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1,
    "harness|ko_mmlu_prehistory|5": 1,
    "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
    "harness|ko_mmlu_econometrics|5": 1,
    "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1,
    "harness|ko_mmlu_nutrition|5": 1,
    "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1,
    "harness|ko_mmlu_astronomy|5": 1,
    "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1,
    "harness|ko_mmlu_machine_learning|5": 1,
    "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1,
    "harness|ko_mmlu_college_computer_science|5": 1,
    "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1,
    "harness|ko_mmlu_security_studies|5": 1,
    "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1,
    "harness|ko_mmlu_high_school_us_history|5": 1,
    "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0,
    "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "MNC-LLM/Mistral-7B-O3k-Au1k-ver0.7",
    "model_sha": "99abb58ee6efae9e5cdc9bc427c79bc4a7b6f1a2",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}