results/DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.54/result_2024-01-21 09:29:31.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2721843003412969,
            "acc_stderr": 0.013006600406423707,
            "acc_norm": 0.3242320819112628,
            "acc_norm_stderr": 0.01367881039951882
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3593905596494722,
            "acc_stderr": 0.004788412062375701,
            "acc_norm": 0.4607647878908584,
            "acc_norm_stderr": 0.004974395131539591
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.30994152046783624,
            "acc_stderr": 0.035469769593931624,
            "acc_norm": 0.30994152046783624,
            "acc_norm_stderr": 0.035469769593931624
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.18446601941747573,
            "acc_stderr": 0.03840423627288276,
            "acc_norm": 0.18446601941747573,
            "acc_norm_stderr": 0.03840423627288276
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.23499361430395913,
            "acc_stderr": 0.015162024152278441,
            "acc_norm": 0.23499361430395913,
            "acc_norm_stderr": 0.015162024152278441
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.035914440841969694,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.035914440841969694
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3148936170212766,
            "acc_stderr": 0.030363582197238174,
            "acc_norm": 0.3148936170212766,
            "acc_norm_stderr": 0.030363582197238174
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3132530120481928,
            "acc_stderr": 0.03610805018031023,
            "acc_norm": 0.3132530120481928,
            "acc_norm_stderr": 0.03610805018031023
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.20257234726688103,
            "acc_stderr": 0.022827317491059682,
            "acc_norm": 0.20257234726688103,
            "acc_norm_stderr": 0.022827317491059682
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.35874439461883406,
            "acc_stderr": 0.03219079200419997,
            "acc_norm": 0.35874439461883406,
            "acc_norm_stderr": 0.03219079200419997
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.29770992366412213,
            "acc_stderr": 0.040103589424622034,
            "acc_norm": 0.29770992366412213,
            "acc_norm_stderr": 0.040103589424622034
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.20202020202020202,
            "acc_stderr": 0.028606204289229872,
            "acc_norm": 0.20202020202020202,
            "acc_norm_stderr": 0.028606204289229872
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.22758620689655173,
            "acc_stderr": 0.03493950380131184,
            "acc_norm": 0.22758620689655173,
            "acc_norm_stderr": 0.03493950380131184
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.040233822736177455,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.040233822736177455
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.24369747899159663,
            "acc_stderr": 0.027886828078380558,
            "acc_norm": 0.24369747899159663,
            "acc_norm_stderr": 0.027886828078380558
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.24871794871794872,
            "acc_stderr": 0.021916957709213796,
            "acc_norm": 0.24871794871794872,
            "acc_norm_stderr": 0.021916957709213796
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.21296296296296297,
            "acc_stderr": 0.03957835471980981,
            "acc_norm": 0.21296296296296297,
            "acc_norm_stderr": 0.03957835471980981
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2315270935960591,
            "acc_stderr": 0.029678333141444434,
            "acc_norm": 0.2315270935960591,
            "acc_norm_stderr": 0.029678333141444434
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.21935483870967742,
            "acc_stderr": 0.023540799358723268,
            "acc_norm": 0.21935483870967742,
            "acc_norm_stderr": 0.023540799358723268
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2564102564102564,
            "acc_stderr": 0.028605953702004274,
            "acc_norm": 0.2564102564102564,
            "acc_norm_stderr": 0.028605953702004274
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.22641509433962265,
            "acc_stderr": 0.02575755989310672,
            "acc_norm": 0.22641509433962265,
            "acc_norm_stderr": 0.02575755989310672
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2727272727272727,
            "acc_stderr": 0.04265792110940589,
            "acc_norm": 0.2727272727272727,
            "acc_norm_stderr": 0.04265792110940589
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24074074074074073,
            "acc_stderr": 0.02606715922227578,
            "acc_norm": 0.24074074074074073,
            "acc_norm_stderr": 0.02606715922227578
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2781456953642384,
            "acc_stderr": 0.03658603262763743,
            "acc_norm": 0.2781456953642384,
            "acc_norm_stderr": 0.03658603262763743
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.2885572139303483,
            "acc_stderr": 0.03203841040213321,
            "acc_norm": 0.2885572139303483,
            "acc_norm_stderr": 0.03203841040213321
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.24277456647398843,
            "acc_stderr": 0.0326926380614177,
            "acc_norm": 0.24277456647398843,
            "acc_norm_stderr": 0.0326926380614177
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.23544973544973544,
            "acc_stderr": 0.021851509822031715,
            "acc_norm": 0.23544973544973544,
            "acc_norm_stderr": 0.021851509822031715
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.20833333333333334,
            "acc_stderr": 0.033961162058453336,
            "acc_norm": 0.20833333333333334,
            "acc_norm_stderr": 0.033961162058453336
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816505,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816505
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720684,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720684
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.23121387283236994,
            "acc_stderr": 0.02269865716785571,
            "acc_norm": 0.23121387283236994,
            "acc_norm_stderr": 0.02269865716785571
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.294478527607362,
            "acc_stderr": 0.03581165790474082,
            "acc_norm": 0.294478527607362,
            "acc_norm_stderr": 0.03581165790474082
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.27469135802469136,
            "acc_stderr": 0.024836057868294677,
            "acc_norm": 0.27469135802469136,
            "acc_norm_stderr": 0.024836057868294677
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.22797927461139897,
            "acc_stderr": 0.03027690994517826,
            "acc_norm": 0.22797927461139897,
            "acc_norm_stderr": 0.03027690994517826
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.20175438596491227,
            "acc_stderr": 0.037752050135836386,
            "acc_norm": 0.20175438596491227,
            "acc_norm_stderr": 0.037752050135836386
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.23302752293577983,
            "acc_stderr": 0.018125669180861507,
            "acc_norm": 0.23302752293577983,
            "acc_norm_stderr": 0.018125669180861507
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.18253968253968253,
            "acc_stderr": 0.034550710191021496,
            "acc_norm": 0.18253968253968253,
            "acc_norm_stderr": 0.034550710191021496
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.26143790849673204,
            "acc_stderr": 0.025160998214292456,
            "acc_norm": 0.26143790849673204,
            "acc_norm_stderr": 0.025160998214292456
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.3305785123966942,
            "acc_stderr": 0.04294340845212093,
            "acc_norm": 0.3305785123966942,
            "acc_norm_stderr": 0.04294340845212093
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.14473684210526316,
            "acc_stderr": 0.0286319518459304,
            "acc_norm": 0.14473684210526316,
            "acc_norm_stderr": 0.0286319518459304
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.017630827375148383,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.017630827375148383
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.30141843971631205,
            "acc_stderr": 0.02737412888263115,
            "acc_norm": 0.30141843971631205,
            "acc_norm_stderr": 0.02737412888263115
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2767857142857143,
            "acc_stderr": 0.042466243366976235,
            "acc_norm": 0.2767857142857143,
            "acc_norm_stderr": 0.042466243366976235
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.30092592592592593,
            "acc_stderr": 0.03128039084329883,
            "acc_norm": 0.30092592592592593,
            "acc_norm_stderr": 0.03128039084329883
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27039106145251396,
            "acc_stderr": 0.014854993938010083,
            "acc_norm": 0.27039106145251396,
            "acc_norm_stderr": 0.014854993938010083
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3602941176470588,
            "acc_stderr": 0.029163128570670736,
            "acc_norm": 0.3602941176470588,
            "acc_norm_stderr": 0.029163128570670736
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.24081632653061225,
            "acc_stderr": 0.02737294220178816,
            "acc_norm": 0.24081632653061225,
            "acc_norm_stderr": 0.02737294220178816
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.25316455696202533,
            "acc_stderr": 0.028304657943035286,
            "acc_norm": 0.25316455696202533,
            "acc_norm_stderr": 0.028304657943035286
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.26140808344198174,
            "acc_stderr": 0.011222528169771314,
            "acc_norm": 0.26140808344198174,
            "acc_norm_stderr": 0.011222528169771314
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.03019028245350195,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.03019028245350195
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.30303030303030304,
            "acc_stderr": 0.03588624800091709,
            "acc_norm": 0.30303030303030304,
            "acc_norm_stderr": 0.03588624800091709
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.27050183598531213,
            "mc1_stderr": 0.015550778332842885,
            "mc2": 0.41628207118178134,
            "mc2_stderr": 0.01511903356687514
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.26092089728453366,
            "acc_stderr": 0.015097836279964201,
            "acc_norm": 0.3482880755608028,
            "acc_norm_stderr": 0.016379926739148037
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.54",
        "model_sha": "baa9eb0e08e09ef6bb1fcaa76db69d4e64cb48c1",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}