|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2235494880546075,
            "acc_stderr": 0.012174896631202614,
            "acc_norm": 0.26621160409556316,
            "acc_norm_stderr": 0.012915774781523216
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.30611431985660226,
            "acc_stderr": 0.004599358920909541,
            "acc_norm": 0.35222067317267475,
            "acc_norm_stderr": 0.004766860907171539
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4619883040935672,
            "acc_stderr": 0.03823727092882307,
            "acc_norm": 0.4619883040935672,
            "acc_norm_stderr": 0.03823727092882307
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4854368932038835,
            "acc_stderr": 0.04948637324026637,
            "acc_norm": 0.4854368932038835,
            "acc_norm_stderr": 0.04948637324026637
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3537675606641124,
            "acc_stderr": 0.017098184708161906,
            "acc_norm": 0.3537675606641124,
            "acc_norm_stderr": 0.017098184708161906
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.34074074074074073,
            "acc_stderr": 0.04094376269996794,
            "acc_norm": 0.34074074074074073,
            "acc_norm_stderr": 0.04094376269996794
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909283,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909283
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3276595744680851,
            "acc_stderr": 0.030683020843231004,
            "acc_norm": 0.3276595744680851,
            "acc_norm_stderr": 0.030683020843231004
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3614457831325301,
            "acc_stderr": 0.03740059382029319,
            "acc_norm": 0.3614457831325301,
            "acc_norm_stderr": 0.03740059382029319
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.43086816720257237,
            "acc_stderr": 0.028125340983972718,
            "acc_norm": 0.43086816720257237,
            "acc_norm_stderr": 0.028125340983972718
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.34977578475336324,
            "acc_stderr": 0.03200736719484504,
            "acc_norm": 0.34977578475336324,
            "acc_norm_stderr": 0.03200736719484504
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.366412213740458,
            "acc_stderr": 0.04225875451969638,
            "acc_norm": 0.366412213740458,
            "acc_norm_stderr": 0.04225875451969638
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.42424242424242425,
            "acc_stderr": 0.035212249088415824,
            "acc_norm": 0.42424242424242425,
            "acc_norm_stderr": 0.035212249088415824
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5172413793103449,
            "acc_stderr": 0.04164188720169375,
            "acc_norm": 0.5172413793103449,
            "acc_norm_stderr": 0.04164188720169375
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3137254901960784,
            "acc_stderr": 0.04617034827006718,
            "acc_norm": 0.3137254901960784,
            "acc_norm_stderr": 0.04617034827006718
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.46638655462184875,
            "acc_stderr": 0.03240501447690071,
            "acc_norm": 0.46638655462184875,
            "acc_norm_stderr": 0.03240501447690071
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.3641025641025641,
            "acc_stderr": 0.024396672985094764,
            "acc_norm": 0.3641025641025641,
            "acc_norm_stderr": 0.024396672985094764
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.52,
            "acc_stderr": 0.05021167315686779,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.05021167315686779
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384739,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384739
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.04826217294139894,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.04826217294139894
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3891625615763547,
            "acc_stderr": 0.03430462416103872,
            "acc_norm": 0.3891625615763547,
            "acc_norm_stderr": 0.03430462416103872
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.38064516129032255,
            "acc_stderr": 0.027621717832907046,
            "acc_norm": 0.38064516129032255,
            "acc_norm_stderr": 0.027621717832907046
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.594017094017094,
            "acc_stderr": 0.03217180182641086,
            "acc_norm": 0.594017094017094,
            "acc_norm_stderr": 0.03217180182641086
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4037735849056604,
            "acc_stderr": 0.03019761160019795,
            "acc_norm": 0.4037735849056604,
            "acc_norm_stderr": 0.03019761160019795
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.41818181818181815,
            "acc_stderr": 0.0472457740573157,
            "acc_norm": 0.41818181818181815,
            "acc_norm_stderr": 0.0472457740573157
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3074074074074074,
            "acc_stderr": 0.02813325257881563,
            "acc_norm": 0.3074074074074074,
            "acc_norm_stderr": 0.02813325257881563
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.271523178807947,
            "acc_stderr": 0.036313298039696545,
            "acc_norm": 0.271523178807947,
            "acc_norm_stderr": 0.036313298039696545
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.4577114427860697,
            "acc_stderr": 0.035228658640995975,
            "acc_norm": 0.4577114427860697,
            "acc_norm_stderr": 0.035228658640995975
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.36416184971098264,
            "acc_stderr": 0.03669072477416907,
            "acc_norm": 0.36416184971098264,
            "acc_norm_stderr": 0.03669072477416907
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3941798941798942,
            "acc_stderr": 0.02516798233389414,
            "acc_norm": 0.3941798941798942,
            "acc_norm_stderr": 0.02516798233389414
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3125,
            "acc_stderr": 0.038760854559127644,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.038760854559127644
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4479768786127168,
            "acc_stderr": 0.026772990653361813,
            "acc_norm": 0.4479768786127168,
            "acc_norm_stderr": 0.026772990653361813
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3496932515337423,
            "acc_stderr": 0.03746668325470022,
            "acc_norm": 0.3496932515337423,
            "acc_norm_stderr": 0.03746668325470022
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.37962962962962965,
            "acc_stderr": 0.027002521034516475,
            "acc_norm": 0.37962962962962965,
            "acc_norm_stderr": 0.027002521034516475
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384739,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384739
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.43005181347150256,
            "acc_stderr": 0.035729543331448094,
            "acc_norm": 0.43005181347150256,
            "acc_norm_stderr": 0.035729543331448094
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2719298245614035,
            "acc_stderr": 0.041857744240220575,
            "acc_norm": 0.2719298245614035,
            "acc_norm_stderr": 0.041857744240220575
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3779816513761468,
            "acc_stderr": 0.02078918706672812,
            "acc_norm": 0.3779816513761468,
            "acc_norm_stderr": 0.02078918706672812
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.48412698412698413,
            "acc_stderr": 0.04469881854072606,
            "acc_norm": 0.48412698412698413,
            "acc_norm_stderr": 0.04469881854072606
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4411764705882353,
            "acc_stderr": 0.028431095444176643,
            "acc_norm": 0.4411764705882353,
            "acc_norm_stderr": 0.028431095444176643
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5867768595041323,
            "acc_stderr": 0.04495087843548408,
            "acc_norm": 0.5867768595041323,
            "acc_norm_stderr": 0.04495087843548408
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4144736842105263,
            "acc_stderr": 0.04008973785779206,
            "acc_norm": 0.4144736842105263,
            "acc_norm_stderr": 0.04008973785779206
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3300653594771242,
            "acc_stderr": 0.019023726160724556,
            "acc_norm": 0.3300653594771242,
            "acc_norm_stderr": 0.019023726160724556
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.36879432624113473,
            "acc_stderr": 0.028782227561347247,
            "acc_norm": 0.36879432624113473,
            "acc_norm_stderr": 0.028782227561347247
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.30357142857142855,
            "acc_stderr": 0.04364226155841044,
            "acc_norm": 0.30357142857142855,
            "acc_norm_stderr": 0.04364226155841044
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.032757734861009996,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.032757734861009996
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23910614525139665,
            "acc_stderr": 0.014265554192331161,
            "acc_norm": 0.23910614525139665,
            "acc_norm_stderr": 0.014265554192331161
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3161764705882353,
            "acc_stderr": 0.02824568739146291,
            "acc_norm": 0.3161764705882353,
            "acc_norm_stderr": 0.02824568739146291
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5755102040816327,
            "acc_stderr": 0.031642094879429414,
            "acc_norm": 0.5755102040816327,
            "acc_norm_stderr": 0.031642094879429414
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.350210970464135,
            "acc_stderr": 0.03105239193758435,
            "acc_norm": 0.350210970464135,
            "acc_norm_stderr": 0.03105239193758435
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3200782268578879,
            "acc_stderr": 0.011914791947638519,
            "acc_norm": 0.3200782268578879,
            "acc_norm_stderr": 0.011914791947638519
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.39215686274509803,
            "acc_stderr": 0.03426712349247271,
            "acc_norm": 0.39215686274509803,
            "acc_norm_stderr": 0.03426712349247271
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3696969696969697,
            "acc_stderr": 0.03769430314512567,
            "acc_norm": 0.3696969696969697,
            "acc_norm_stderr": 0.03769430314512567
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3072215422276622,
            "mc1_stderr": 0.016150201321323013,
            "mc2": 0.48699251655132686,
            "mc2_stderr": 0.016174272005682996
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.33530106257378983,
            "acc_stderr": 0.016230981232989827,
            "acc_norm": 0.3742621015348288,
            "acc_norm_stderr": 0.016637917789798732
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "01-ai/Yi-6B-Chat",
        "model_sha": "36326f9bc1c8020e0cf29ea830ee5e6679a66a23",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}