results/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 16:20:48.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3626279863481229,
            "acc_stderr": 0.014049106564955005,
            "acc_norm": 0.43686006825938567,
            "acc_norm_stderr": 0.014494421584256512
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4130651264688309,
            "acc_stderr": 0.0049137803474988756,
            "acc_norm": 0.5571599283011353,
            "acc_norm_stderr": 0.004957068377516513
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.47953216374269003,
            "acc_stderr": 0.038316105328219316,
            "acc_norm": 0.47953216374269003,
            "acc_norm_stderr": 0.038316105328219316
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4368932038834951,
            "acc_stderr": 0.04911147107365777,
            "acc_norm": 0.4368932038834951,
            "acc_norm_stderr": 0.04911147107365777
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.49680715197956576,
            "acc_stderr": 0.017879598945933065,
            "acc_norm": 0.49680715197956576,
            "acc_norm_stderr": 0.017879598945933065
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.42962962962962964,
            "acc_stderr": 0.04276349494376598,
            "acc_norm": 0.42962962962962964,
            "acc_norm_stderr": 0.04276349494376598
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768077,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768077
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.40425531914893614,
            "acc_stderr": 0.032081157507886836,
            "acc_norm": 0.40425531914893614,
            "acc_norm_stderr": 0.032081157507886836
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3855421686746988,
            "acc_stderr": 0.03789134424611548,
            "acc_norm": 0.3855421686746988,
            "acc_norm_stderr": 0.03789134424611548
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.47266881028938906,
            "acc_stderr": 0.02835563356832818,
            "acc_norm": 0.47266881028938906,
            "acc_norm_stderr": 0.02835563356832818
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4977578475336323,
            "acc_stderr": 0.033557465352232634,
            "acc_norm": 0.4977578475336323,
            "acc_norm_stderr": 0.033557465352232634
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4351145038167939,
            "acc_stderr": 0.04348208051644858,
            "acc_norm": 0.4351145038167939,
            "acc_norm_stderr": 0.04348208051644858
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5050505050505051,
            "acc_stderr": 0.035621707606254015,
            "acc_norm": 0.5050505050505051,
            "acc_norm_stderr": 0.035621707606254015
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.31724137931034485,
            "acc_stderr": 0.03878352372138621,
            "acc_norm": 0.31724137931034485,
            "acc_norm_stderr": 0.03878352372138621
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.041583075330832865,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.041583075330832865
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3949579831932773,
            "acc_stderr": 0.03175367846096625,
            "acc_norm": 0.3949579831932773,
            "acc_norm_stderr": 0.03175367846096625
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.3769230769230769,
            "acc_stderr": 0.024570975364225995,
            "acc_norm": 0.3769230769230769,
            "acc_norm_stderr": 0.024570975364225995
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.47,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5185185185185185,
            "acc_stderr": 0.04830366024635331,
            "acc_norm": 0.5185185185185185,
            "acc_norm_stderr": 0.04830366024635331
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3448275862068966,
            "acc_stderr": 0.03344283744280458,
            "acc_norm": 0.3448275862068966,
            "acc_norm_stderr": 0.03344283744280458
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4161290322580645,
            "acc_stderr": 0.028040981380761547,
            "acc_norm": 0.4161290322580645,
            "acc_norm_stderr": 0.028040981380761547
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6153846153846154,
            "acc_stderr": 0.03187195347942466,
            "acc_norm": 0.6153846153846154,
            "acc_norm_stderr": 0.03187195347942466
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.39622641509433965,
            "acc_stderr": 0.030102793781791194,
            "acc_norm": 0.39622641509433965,
            "acc_norm_stderr": 0.030102793781791194
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.45454545454545453,
            "acc_stderr": 0.04769300568972743,
            "acc_norm": 0.45454545454545453,
            "acc_norm_stderr": 0.04769300568972743
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2740740740740741,
            "acc_stderr": 0.027195934804085626,
            "acc_norm": 0.2740740740740741,
            "acc_norm_stderr": 0.027195934804085626
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.271523178807947,
            "acc_stderr": 0.03631329803969654,
            "acc_norm": 0.271523178807947,
            "acc_norm_stderr": 0.03631329803969654
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.4975124378109453,
            "acc_stderr": 0.03535490150137288,
            "acc_norm": 0.4975124378109453,
            "acc_norm_stderr": 0.03535490150137288
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3236994219653179,
            "acc_stderr": 0.03567603799639172,
            "acc_norm": 0.3236994219653179,
            "acc_norm_stderr": 0.03567603799639172
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.25132275132275134,
            "acc_stderr": 0.022340482339643895,
            "acc_norm": 0.25132275132275134,
            "acc_norm_stderr": 0.022340482339643895
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.040166600304512336,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.040166600304512336
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252606,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252606
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.55,
            "acc_stderr": 0.05,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.47398843930635837,
            "acc_stderr": 0.02688264343402289,
            "acc_norm": 0.47398843930635837,
            "acc_norm_stderr": 0.02688264343402289
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4110429447852761,
            "acc_stderr": 0.038656978537853624,
            "acc_norm": 0.4110429447852761,
            "acc_norm_stderr": 0.038656978537853624
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4506172839506173,
            "acc_stderr": 0.0276847214156562,
            "acc_norm": 0.4506172839506173,
            "acc_norm_stderr": 0.0276847214156562
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.44559585492227977,
            "acc_stderr": 0.03587014986075659,
            "acc_norm": 0.44559585492227977,
            "acc_norm_stderr": 0.03587014986075659
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.04096985139843671,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.04096985139843671
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.48073394495412847,
            "acc_stderr": 0.02142140298254889,
            "acc_norm": 0.48073394495412847,
            "acc_norm_stderr": 0.02142140298254889
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.24603174603174602,
            "acc_stderr": 0.03852273364924317,
            "acc_norm": 0.24603174603174602,
            "acc_norm_stderr": 0.03852273364924317
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3627450980392157,
            "acc_stderr": 0.027530078447110307,
            "acc_norm": 0.3627450980392157,
            "acc_norm_stderr": 0.027530078447110307
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.44,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5371900826446281,
            "acc_stderr": 0.04551711196104218,
            "acc_norm": 0.5371900826446281,
            "acc_norm_stderr": 0.04551711196104218
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.35526315789473684,
            "acc_stderr": 0.038947344870133176,
            "acc_norm": 0.35526315789473684,
            "acc_norm_stderr": 0.038947344870133176
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3660130718954248,
            "acc_stderr": 0.019488025745529675,
            "acc_norm": 0.3660130718954248,
            "acc_norm_stderr": 0.019488025745529675
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3049645390070922,
            "acc_stderr": 0.027464708442022135,
            "acc_norm": 0.3049645390070922,
            "acc_norm_stderr": 0.027464708442022135
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25,
            "acc_stderr": 0.04109974682633932,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04109974682633932
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2361111111111111,
            "acc_stderr": 0.028963702570791033,
            "acc_norm": 0.2361111111111111,
            "acc_norm_stderr": 0.028963702570791033
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3125,
            "acc_stderr": 0.02815637344037142,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.02815637344037142
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.37551020408163266,
            "acc_stderr": 0.031001209039894836,
            "acc_norm": 0.37551020408163266,
            "acc_norm_stderr": 0.031001209039894836
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5738396624472574,
            "acc_stderr": 0.03219035703131774,
            "acc_norm": 0.5738396624472574,
            "acc_norm_stderr": 0.03219035703131774
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.28683181225554105,
            "acc_stderr": 0.011551504781176933,
            "acc_norm": 0.28683181225554105,
            "acc_norm_stderr": 0.011551504781176933
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4166666666666667,
            "acc_stderr": 0.0346022832723917,
            "acc_norm": 0.4166666666666667,
            "acc_norm_stderr": 0.0346022832723917
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.47878787878787876,
            "acc_stderr": 0.03900828913737302,
            "acc_norm": 0.47878787878787876,
            "acc_norm_stderr": 0.03900828913737302
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.27539779681762544,
            "mc1_stderr": 0.015638135667775523,
            "mc2": 0.43540541386680215,
            "mc2_stderr": 0.015086654503820634
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.43683589138134593,
            "acc_stderr": 0.017052633559856076,
            "acc_norm": 0.5324675324675324,
            "acc_norm_stderr": 0.017154073716682868
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge",
        "model_sha": "b38c6d07fbdb2119f7c1ee28c1a764c305547aec",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
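A minimal sketch of how one might consume this results file, assuming it has been downloaded locally under the path shown above. It simply averages the per-task `acc_norm` values across the `ko_mmlu` subtasks; this is only one possible aggregation and may differ from how the leaderboard itself combines scores.

```python
import json

# Hypothetical local path; adjust to wherever the results JSON was saved.
RESULTS_PATH = "result_2023-11-01 16:20:48.json"

with open(RESULTS_PATH, encoding="utf-8") as f:
    data = json.load(f)

# Collect acc_norm for every ko_mmlu subtask and report a simple mean.
mmlu_scores = [
    task["acc_norm"]
    for name, task in data["results"].items()
    if name.startswith("harness|ko_mmlu_")
]
print(f"ko_mmlu subtasks: {len(mmlu_scores)}")
print(f"mean acc_norm: {sum(mmlu_scores) / len(mmlu_scores):.4f}")
```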