{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2713310580204778,
            "acc_stderr": 0.012993807727545784,
            "acc_norm": 0.3319112627986348,
            "acc_norm_stderr": 0.013760988200880534
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.35590519816769567,
            "acc_stderr": 0.004778081784542411,
            "acc_norm": 0.44503087034455285,
            "acc_norm_stderr": 0.004959535443170614
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.3216374269005848,
            "acc_stderr": 0.03582529442573122,
            "acc_norm": 0.3216374269005848,
            "acc_norm_stderr": 0.03582529442573122
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2524271844660194,
            "acc_stderr": 0.04301250399690877,
            "acc_norm": 0.2524271844660194,
            "acc_norm_stderr": 0.04301250399690877
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2388250319284802,
            "acc_stderr": 0.015246803197398687,
            "acc_norm": 0.2388250319284802,
            "acc_norm_stderr": 0.015246803197398687
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2074074074074074,
            "acc_stderr": 0.03502553170678316,
            "acc_norm": 0.2074074074074074,
            "acc_norm_stderr": 0.03502553170678316
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3021276595744681,
            "acc_stderr": 0.03001755447188055,
            "acc_norm": 0.3021276595744681,
            "acc_norm_stderr": 0.03001755447188055
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3313253012048193,
            "acc_stderr": 0.03664314777288086,
            "acc_norm": 0.3313253012048193,
            "acc_norm_stderr": 0.03664314777288086
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.26366559485530544,
            "acc_stderr": 0.02502553850053234,
            "acc_norm": 0.26366559485530544,
            "acc_norm_stderr": 0.02502553850053234
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.2825112107623318,
            "acc_stderr": 0.030216831011508762,
            "acc_norm": 0.2825112107623318,
            "acc_norm_stderr": 0.030216831011508762
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.25190839694656486,
            "acc_stderr": 0.03807387116306086,
            "acc_norm": 0.25190839694656486,
            "acc_norm_stderr": 0.03807387116306086
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768077,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768077
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.17676767676767677,
            "acc_stderr": 0.027178752639044915,
            "acc_norm": 0.17676767676767677,
            "acc_norm_stderr": 0.027178752639044915
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2413793103448276,
            "acc_stderr": 0.03565998174135302,
            "acc_norm": 0.2413793103448276,
            "acc_norm_stderr": 0.03565998174135302
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.041583075330832865,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.041583075330832865
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.026653531596715487,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.026653531596715487
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2230769230769231,
            "acc_stderr": 0.02110773012724401,
            "acc_norm": 0.2230769230769231,
            "acc_norm_stderr": 0.02110773012724401
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.17,
            "acc_stderr": 0.03775251680686371,
            "acc_norm": 0.17,
            "acc_norm_stderr": 0.03775251680686371
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.04330043749650742,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.04330043749650742
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.24630541871921183,
            "acc_stderr": 0.030315099285617715,
            "acc_norm": 0.24630541871921183,
            "acc_norm_stderr": 0.030315099285617715
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.22258064516129034,
            "acc_stderr": 0.023664216671642507,
            "acc_norm": 0.22258064516129034,
            "acc_norm_stderr": 0.023664216671642507
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.26495726495726496,
            "acc_stderr": 0.028911208802749465,
            "acc_norm": 0.26495726495726496,
            "acc_norm_stderr": 0.028911208802749465
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.21509433962264152,
            "acc_stderr": 0.025288394502891373,
            "acc_norm": 0.21509433962264152,
            "acc_norm_stderr": 0.025288394502891373
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.37272727272727274,
            "acc_stderr": 0.04631381319425463,
            "acc_norm": 0.37272727272727274,
            "acc_norm_stderr": 0.04631381319425463
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.026962424325073828,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.026962424325073828
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.1986754966887417,
            "acc_stderr": 0.032578473844367746,
            "acc_norm": 0.1986754966887417,
            "acc_norm_stderr": 0.032578473844367746
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.25870646766169153,
            "acc_stderr": 0.03096590312357304,
            "acc_norm": 0.25870646766169153,
            "acc_norm_stderr": 0.03096590312357304
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2254335260115607,
            "acc_stderr": 0.03186209851641144,
            "acc_norm": 0.2254335260115607,
            "acc_norm_stderr": 0.03186209851641144
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2328042328042328,
            "acc_stderr": 0.02176596167215454,
            "acc_norm": 0.2328042328042328,
            "acc_norm_stderr": 0.02176596167215454
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.22916666666666666,
            "acc_stderr": 0.035146974678623884,
            "acc_norm": 0.22916666666666666,
            "acc_norm_stderr": 0.035146974678623884
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.23699421965317918,
            "acc_stderr": 0.02289408248992599,
            "acc_norm": 0.23699421965317918,
            "acc_norm_stderr": 0.02289408248992599
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.22699386503067484,
            "acc_stderr": 0.03291099578615771,
            "acc_norm": 0.22699386503067484,
            "acc_norm_stderr": 0.03291099578615771
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2654320987654321,
            "acc_stderr": 0.02456922360046085,
            "acc_norm": 0.2654320987654321,
            "acc_norm_stderr": 0.02456922360046085
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.20207253886010362,
            "acc_stderr": 0.02897908979429673,
            "acc_norm": 0.20207253886010362,
            "acc_norm_stderr": 0.02897908979429673
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.20175438596491227,
            "acc_stderr": 0.03775205013583638,
            "acc_norm": 0.20175438596491227,
            "acc_norm_stderr": 0.03775205013583638
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.1981651376146789,
            "acc_stderr": 0.01709057380421789,
            "acc_norm": 0.1981651376146789,
            "acc_norm_stderr": 0.01709057380421789
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.1984126984126984,
            "acc_stderr": 0.035670166752768635,
            "acc_norm": 0.1984126984126984,
            "acc_norm_stderr": 0.035670166752768635
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.023929155517351284,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.023929155517351284
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.256198347107438,
            "acc_stderr": 0.03984979653302872,
            "acc_norm": 0.256198347107438,
            "acc_norm_stderr": 0.03984979653302872
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.17763157894736842,
            "acc_stderr": 0.03110318238312338,
            "acc_norm": 0.17763157894736842,
            "acc_norm_stderr": 0.03110318238312338
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.25980392156862747,
            "acc_stderr": 0.01774089950917779,
            "acc_norm": 0.25980392156862747,
            "acc_norm_stderr": 0.01774089950917779
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.24113475177304963,
            "acc_stderr": 0.025518731049537766,
            "acc_norm": 0.24113475177304963,
            "acc_norm_stderr": 0.025518731049537766
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.26785714285714285,
            "acc_stderr": 0.04203277291467764,
            "acc_norm": 0.26785714285714285,
            "acc_norm_stderr": 0.04203277291467764
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.25462962962962965,
            "acc_stderr": 0.029711275860005344,
            "acc_norm": 0.25462962962962965,
            "acc_norm_stderr": 0.029711275860005344
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24581005586592178,
            "acc_stderr": 0.014400296429225608,
            "acc_norm": 0.24581005586592178,
            "acc_norm_stderr": 0.014400296429225608
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384741,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384741
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.16176470588235295,
            "acc_stderr": 0.02236867256288675,
            "acc_norm": 0.16176470588235295,
            "acc_norm_stderr": 0.02236867256288675
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.1836734693877551,
            "acc_stderr": 0.024789071332007636,
            "acc_norm": 0.1836734693877551,
            "acc_norm_stderr": 0.024789071332007636
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2742616033755274,
            "acc_stderr": 0.029041333510598046,
            "acc_norm": 0.2742616033755274,
            "acc_norm_stderr": 0.029041333510598046
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.23663624511082137,
            "acc_stderr": 0.010855137351572742,
            "acc_norm": 0.23663624511082137,
            "acc_norm_stderr": 0.010855137351572742
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.02977177522814563,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.02977177522814563
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.24848484848484848,
            "acc_stderr": 0.03374402644139404,
            "acc_norm": 0.24848484848484848,
            "acc_norm_stderr": 0.03374402644139404
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.24112607099143207,
            "mc1_stderr": 0.014974827279752329,
            "mc2": 0.4094493980194844,
            "mc2_stderr": 0.014890936810930833
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.30932703659976385,
            "acc_stderr": 0.015891320505520886,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.01701403811929746
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "AIFT/AIFT-instruct-SFT-1.3B-v1.1",
        "model_sha": "2aae4491faed1be050cac64de55d0a79288e96a9",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}