{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3839590443686007,
            "acc_stderr": 0.01421244498065189,
            "acc_norm": 0.4274744027303754,
            "acc_norm_stderr": 0.014456862944650647
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3906592312288389,
            "acc_stderr": 0.004869010152280755,
            "acc_norm": 0.5073690499900418,
            "acc_norm_stderr": 0.004989239462835229
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5263157894736842,
            "acc_stderr": 0.03829509868994727,
            "acc_norm": 0.5263157894736842,
            "acc_norm_stderr": 0.03829509868994727
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6019417475728155,
            "acc_stderr": 0.04846748253977238,
            "acc_norm": 0.6019417475728155,
            "acc_norm_stderr": 0.04846748253977238
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5044699872286079,
            "acc_stderr": 0.01787924897058436,
            "acc_norm": 0.5044699872286079,
            "acc_norm_stderr": 0.01787924897058436
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37777777777777777,
            "acc_stderr": 0.04188307537595853,
            "acc_norm": 0.37777777777777777,
            "acc_norm_stderr": 0.04188307537595853
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4340425531914894,
            "acc_stderr": 0.032400380867927465,
            "acc_norm": 0.4340425531914894,
            "acc_norm_stderr": 0.032400380867927465
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.41566265060240964,
            "acc_stderr": 0.03836722176598052,
            "acc_norm": 0.41566265060240964,
            "acc_norm_stderr": 0.03836722176598052
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4694533762057878,
            "acc_stderr": 0.028345045864840678,
            "acc_norm": 0.4694533762057878,
            "acc_norm_stderr": 0.028345045864840678
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.484304932735426,
            "acc_stderr": 0.0335412657542081,
            "acc_norm": 0.484304932735426,
            "acc_norm_stderr": 0.0335412657542081
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4580152671755725,
            "acc_stderr": 0.04369802690578756,
            "acc_norm": 0.4580152671755725,
            "acc_norm_stderr": 0.04369802690578756
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.601010101010101,
            "acc_stderr": 0.03488901616852731,
            "acc_norm": 0.601010101010101,
            "acc_norm_stderr": 0.03488901616852731
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4896551724137931,
            "acc_stderr": 0.041657747757287644,
            "acc_norm": 0.4896551724137931,
            "acc_norm_stderr": 0.041657747757287644
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.28431372549019607,
            "acc_stderr": 0.04488482852329017,
            "acc_norm": 0.28431372549019607,
            "acc_norm_stderr": 0.04488482852329017
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.47478991596638653,
            "acc_stderr": 0.0324371805513741,
            "acc_norm": 0.47478991596638653,
            "acc_norm_stderr": 0.0324371805513741
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.46923076923076923,
            "acc_stderr": 0.02530295889085015,
            "acc_norm": 0.46923076923076923,
            "acc_norm_stderr": 0.02530295889085015
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956914,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956914
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.04832853553437055,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.04832853553437055
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.43349753694581283,
            "acc_stderr": 0.034867317274198714,
            "acc_norm": 0.43349753694581283,
            "acc_norm_stderr": 0.034867317274198714
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4806451612903226,
            "acc_stderr": 0.028422687404312107,
            "acc_norm": 0.4806451612903226,
            "acc_norm_stderr": 0.028422687404312107
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7435897435897436,
            "acc_stderr": 0.028605953702004243,
            "acc_norm": 0.7435897435897436,
            "acc_norm_stderr": 0.028605953702004243
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4830188679245283,
            "acc_stderr": 0.030755120364119905,
            "acc_norm": 0.4830188679245283,
            "acc_norm_stderr": 0.030755120364119905
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5272727272727272,
            "acc_stderr": 0.04782001791380061,
            "acc_norm": 0.5272727272727272,
            "acc_norm_stderr": 0.04782001791380061
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.28888888888888886,
            "acc_stderr": 0.027634907264178544,
            "acc_norm": 0.28888888888888886,
            "acc_norm_stderr": 0.027634907264178544
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3443708609271523,
            "acc_stderr": 0.038796870240733264,
            "acc_norm": 0.3443708609271523,
            "acc_norm_stderr": 0.038796870240733264
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6268656716417911,
            "acc_stderr": 0.034198326081760065,
            "acc_norm": 0.6268656716417911,
            "acc_norm_stderr": 0.034198326081760065
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.43352601156069365,
            "acc_stderr": 0.03778621079092055,
            "acc_norm": 0.43352601156069365,
            "acc_norm_stderr": 0.03778621079092055
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.38095238095238093,
            "acc_stderr": 0.025010749116137595,
            "acc_norm": 0.38095238095238093,
            "acc_norm_stderr": 0.025010749116137595
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.04076663253918567,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.04076663253918567
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.63,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5057803468208093,
            "acc_stderr": 0.026917296179149116,
            "acc_norm": 0.5057803468208093,
            "acc_norm_stderr": 0.026917296179149116
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.44785276073619634,
            "acc_stderr": 0.03906947479456601,
            "acc_norm": 0.44785276073619634,
            "acc_norm_stderr": 0.03906947479456601
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4382716049382716,
            "acc_stderr": 0.027607914087400477,
            "acc_norm": 0.4382716049382716,
            "acc_norm_stderr": 0.027607914087400477
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.49222797927461137,
            "acc_stderr": 0.03608003225569654,
            "acc_norm": 0.49222797927461137,
            "acc_norm_stderr": 0.03608003225569654
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2719298245614035,
            "acc_stderr": 0.04185774424022057,
            "acc_norm": 0.2719298245614035,
            "acc_norm_stderr": 0.04185774424022057
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.47706422018348627,
            "acc_stderr": 0.021414757058175502,
            "acc_norm": 0.47706422018348627,
            "acc_norm_stderr": 0.021414757058175502
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.31746031746031744,
            "acc_stderr": 0.0416345303130286,
            "acc_norm": 0.31746031746031744,
            "acc_norm_stderr": 0.0416345303130286
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5130718954248366,
            "acc_stderr": 0.028620130800700246,
            "acc_norm": 0.5130718954248366,
            "acc_norm_stderr": 0.028620130800700246
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.52,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6694214876033058,
            "acc_stderr": 0.04294340845212094,
            "acc_norm": 0.6694214876033058,
            "acc_norm_stderr": 0.04294340845212094
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5,
            "acc_stderr": 0.04068942293855797,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.04068942293855797
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4117647058823529,
            "acc_stderr": 0.01991037746310594,
            "acc_norm": 0.4117647058823529,
            "acc_norm_stderr": 0.01991037746310594
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3723404255319149,
            "acc_stderr": 0.028838921471251458,
            "acc_norm": 0.3723404255319149,
            "acc_norm_stderr": 0.028838921471251458
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.39285714285714285,
            "acc_stderr": 0.046355501356099754,
            "acc_norm": 0.39285714285714285,
            "acc_norm_stderr": 0.046355501356099754
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.41203703703703703,
            "acc_stderr": 0.03356787758160834,
            "acc_norm": 0.41203703703703703,
            "acc_norm_stderr": 0.03356787758160834
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.28044692737430166,
            "acc_stderr": 0.015024083883322884,
            "acc_norm": 0.28044692737430166,
            "acc_norm_stderr": 0.015024083883322884
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.55,
            "acc_stderr": 0.049999999999999996,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.049999999999999996
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.44485294117647056,
            "acc_stderr": 0.030187532060329387,
            "acc_norm": 0.44485294117647056,
            "acc_norm_stderr": 0.030187532060329387
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4489795918367347,
            "acc_stderr": 0.0318421386668758,
            "acc_norm": 0.4489795918367347,
            "acc_norm_stderr": 0.0318421386668758
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.620253164556962,
            "acc_stderr": 0.0315918875296585,
            "acc_norm": 0.620253164556962,
            "acc_norm_stderr": 0.0315918875296585
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3468057366362451,
            "acc_stderr": 0.012156071332318705,
            "acc_norm": 0.3468057366362451,
            "acc_norm_stderr": 0.012156071332318705
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.45098039215686275,
            "acc_stderr": 0.03492406104163613,
            "acc_norm": 0.45098039215686275,
            "acc_norm_stderr": 0.03492406104163613
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4303030303030303,
            "acc_stderr": 0.03866225962879077,
            "acc_norm": 0.4303030303030303,
            "acc_norm_stderr": 0.03866225962879077
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2778457772337821,
            "mc1_stderr": 0.015680929364024637,
            "mc2": 0.44256276494088104,
            "mc2_stderr": 0.015431425162220794
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.38488783943329397,
            "acc_stderr": 0.016728579701498665,
            "acc_norm": 0.4722550177095632,
            "acc_norm_stderr": 0.01716386797945601
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Ja3ck/Mistral-instruct-Y24-v5",
        "model_sha": "5d268f9f5c87c414661e40ffc464ae5686964586",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}