{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2696245733788396,
            "acc_stderr": 0.01296804068686917,
            "acc_norm": 0.3370307167235495,
            "acc_norm_stderr": 0.013813476652902276
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.35680143397729536,
            "acc_stderr": 0.004780764443411318,
            "acc_norm": 0.44542919737104164,
            "acc_norm_stderr": 0.0049599735147725105
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.30994152046783624,
            "acc_stderr": 0.03546976959393161,
            "acc_norm": 0.30994152046783624,
            "acc_norm_stderr": 0.03546976959393161
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2621359223300971,
            "acc_stderr": 0.04354631077260597,
            "acc_norm": 0.2621359223300971,
            "acc_norm_stderr": 0.04354631077260597
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.24265644955300128,
            "acc_stderr": 0.01532988894089986,
            "acc_norm": 0.24265644955300128,
            "acc_norm_stderr": 0.01532988894089986
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.23703703703703705,
            "acc_stderr": 0.03673731683969506,
            "acc_norm": 0.23703703703703705,
            "acc_norm_stderr": 0.03673731683969506
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3021276595744681,
            "acc_stderr": 0.030017554471880554,
            "acc_norm": 0.3021276595744681,
            "acc_norm_stderr": 0.030017554471880554
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3132530120481928,
            "acc_stderr": 0.036108050180310235,
            "acc_norm": 0.3132530120481928,
            "acc_norm_stderr": 0.036108050180310235
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2540192926045016,
            "acc_stderr": 0.02472386150477169,
            "acc_norm": 0.2540192926045016,
            "acc_norm_stderr": 0.02472386150477169
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.21524663677130046,
            "acc_stderr": 0.027584066602208256,
            "acc_norm": 0.21524663677130046,
            "acc_norm_stderr": 0.027584066602208256
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.20610687022900764,
            "acc_stderr": 0.03547771004159464,
            "acc_norm": 0.20610687022900764,
            "acc_norm_stderr": 0.03547771004159464
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.18686868686868688,
            "acc_stderr": 0.027772533334218977,
            "acc_norm": 0.18686868686868688,
            "acc_norm_stderr": 0.027772533334218977
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2413793103448276,
            "acc_stderr": 0.03565998174135302,
            "acc_norm": 0.2413793103448276,
            "acc_norm_stderr": 0.03565998174135302
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.04158307533083286,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.04158307533083286
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.24369747899159663,
            "acc_stderr": 0.02788682807838057,
            "acc_norm": 0.24369747899159663,
            "acc_norm_stderr": 0.02788682807838057
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2641025641025641,
            "acc_stderr": 0.02235219373745326,
            "acc_norm": 0.2641025641025641,
            "acc_norm_stderr": 0.02235219373745326
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.17,
            "acc_stderr": 0.03775251680686371,
            "acc_norm": 0.17,
            "acc_norm_stderr": 0.03775251680686371
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.04330043749650742,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.04330043749650742
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.16748768472906403,
            "acc_stderr": 0.026273086047535414,
            "acc_norm": 0.16748768472906403,
            "acc_norm_stderr": 0.026273086047535414
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.27419354838709675,
            "acc_stderr": 0.0253781399708852,
            "acc_norm": 0.27419354838709675,
            "acc_norm_stderr": 0.0253781399708852
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.029343114798094455,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.029343114798094455
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.23018867924528302,
            "acc_stderr": 0.02590789712240817,
            "acc_norm": 0.23018867924528302,
            "acc_norm_stderr": 0.02590789712240817
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2727272727272727,
            "acc_stderr": 0.04265792110940589,
            "acc_norm": 0.2727272727272727,
            "acc_norm_stderr": 0.04265792110940589
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.02671924078371217,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.02671924078371217
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.304635761589404,
            "acc_stderr": 0.037579499229433426,
            "acc_norm": 0.304635761589404,
            "acc_norm_stderr": 0.037579499229433426
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.23383084577114427,
            "acc_stderr": 0.029929415408348377,
            "acc_norm": 0.23383084577114427,
            "acc_norm_stderr": 0.029929415408348377
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.21965317919075145,
            "acc_stderr": 0.03156809362703173,
            "acc_norm": 0.21965317919075145,
            "acc_norm_stderr": 0.03156809362703173
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2566137566137566,
            "acc_stderr": 0.022494510767503154,
            "acc_norm": 0.2566137566137566,
            "acc_norm_stderr": 0.022494510767503154
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.22916666666666666,
            "acc_stderr": 0.035146974678623884,
            "acc_norm": 0.22916666666666666,
            "acc_norm_stderr": 0.035146974678623884
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816505,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816505
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.21965317919075145,
            "acc_stderr": 0.0222896388526179,
            "acc_norm": 0.21965317919075145,
            "acc_norm_stderr": 0.0222896388526179
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.22699386503067484,
            "acc_stderr": 0.0329109957861577,
            "acc_norm": 0.22699386503067484,
            "acc_norm_stderr": 0.0329109957861577
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.024922001168886335,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.024922001168886335
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768081,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768081
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.20207253886010362,
            "acc_stderr": 0.02897908979429673,
            "acc_norm": 0.20207253886010362,
            "acc_norm_stderr": 0.02897908979429673
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03999423879281336,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03999423879281336
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.21284403669724772,
            "acc_stderr": 0.017549376389313694,
            "acc_norm": 0.21284403669724772,
            "acc_norm_stderr": 0.017549376389313694
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.1746031746031746,
            "acc_stderr": 0.03395490020856111,
            "acc_norm": 0.1746031746031746,
            "acc_norm_stderr": 0.03395490020856111
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.023805186524888142,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.023805186524888142
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768078,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768078
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.2809917355371901,
            "acc_stderr": 0.04103203830514512,
            "acc_norm": 0.2809917355371901,
            "acc_norm_stderr": 0.04103203830514512
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.21710526315789475,
            "acc_stderr": 0.03355045304882924,
            "acc_norm": 0.21710526315789475,
            "acc_norm_stderr": 0.03355045304882924
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.24183006535947713,
            "acc_stderr": 0.017322789207784326,
            "acc_norm": 0.24183006535947713,
            "acc_norm_stderr": 0.017322789207784326
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.22340425531914893,
            "acc_stderr": 0.024847921358063962,
            "acc_norm": 0.22340425531914893,
            "acc_norm_stderr": 0.024847921358063962
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.24107142857142858,
            "acc_stderr": 0.04059867246952688,
            "acc_norm": 0.24107142857142858,
            "acc_norm_stderr": 0.04059867246952688
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.32407407407407407,
            "acc_stderr": 0.03191923445686185,
            "acc_norm": 0.32407407407407407,
            "acc_norm_stderr": 0.03191923445686185
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27262569832402234,
            "acc_stderr": 0.014893391735249608,
            "acc_norm": 0.27262569832402234,
            "acc_norm_stderr": 0.014893391735249608
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768079,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768079
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.16,
            "acc_stderr": 0.036845294917747094,
            "acc_norm": 0.16,
            "acc_norm_stderr": 0.036845294917747094
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.27941176470588236,
            "acc_stderr": 0.027257202606114944,
            "acc_norm": 0.27941176470588236,
            "acc_norm_stderr": 0.027257202606114944
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.18775510204081633,
            "acc_stderr": 0.02500025603954621,
            "acc_norm": 0.18775510204081633,
            "acc_norm_stderr": 0.02500025603954621
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.34177215189873417,
            "acc_stderr": 0.030874537537553617,
            "acc_norm": 0.34177215189873417,
            "acc_norm_stderr": 0.030874537537553617
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.23598435462842243,
            "acc_stderr": 0.01084480266966268,
            "acc_norm": 0.23598435462842243,
            "acc_norm_stderr": 0.01084480266966268
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.029331162294251728,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.029331162294251728
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.296969696969697,
            "acc_stderr": 0.035679697722680474,
            "acc_norm": 0.296969696969697,
            "acc_norm_stderr": 0.035679697722680474
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26193390452876375,
            "mc1_stderr": 0.015392118805015008,
            "mc2": 0.4198042558596364,
            "mc2_stderr": 0.0150312470035071
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.33293978748524206,
            "acc_stderr": 0.016202431208373776,
            "acc_norm": 0.45218417945690675,
            "acc_norm_stderr": 0.017111567130916782
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "AIFT/AIFT-instruct-SFT-1.3B-v2.1",
        "model_sha": "0e5b001601e4f2131e800a6a696d1d71469d7356",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}