results/Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus/result_2023-10-04 09:05:17.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.29692832764505117,
            "acc_stderr": 0.013352025976725222,
            "acc_norm": 0.34812286689419797,
            "acc_norm_stderr": 0.013921008595179342
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.35311690898227444,
            "acc_stderr": 0.004769618829196517,
            "acc_norm": 0.42939653455486954,
            "acc_norm_stderr": 0.0049397843114489855
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4152046783625731,
            "acc_stderr": 0.03779275945503201,
            "acc_norm": 0.4152046783625731,
            "acc_norm_stderr": 0.03779275945503201
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.42718446601941745,
            "acc_stderr": 0.04897957737781169,
            "acc_norm": 0.42718446601941745,
            "acc_norm_stderr": 0.04897957737781169
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.41507024265644954,
            "acc_stderr": 0.017620137003655265,
            "acc_norm": 0.41507024265644954,
            "acc_norm_stderr": 0.017620137003655265
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.04171654161354543,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.04171654161354543
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.32340425531914896,
            "acc_stderr": 0.03057944277361034,
            "acc_norm": 0.32340425531914896,
            "acc_norm_stderr": 0.03057944277361034
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.35542168674698793,
            "acc_stderr": 0.03726214354322415,
            "acc_norm": 0.35542168674698793,
            "acc_norm_stderr": 0.03726214354322415
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4405144694533762,
            "acc_stderr": 0.028196400574197422,
            "acc_norm": 0.4405144694533762,
            "acc_norm_stderr": 0.028196400574197422
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3632286995515695,
            "acc_stderr": 0.032277904428505,
            "acc_norm": 0.3632286995515695,
            "acc_norm_stderr": 0.032277904428505
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4122137404580153,
            "acc_stderr": 0.04317171194870254,
            "acc_norm": 0.4122137404580153,
            "acc_norm_stderr": 0.04317171194870254
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.43434343434343436,
            "acc_stderr": 0.03531505879359183,
            "acc_norm": 0.43434343434343436,
            "acc_norm_stderr": 0.03531505879359183
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.36551724137931035,
            "acc_stderr": 0.04013124195424386,
            "acc_norm": 0.36551724137931035,
            "acc_norm_stderr": 0.04013124195424386
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.18627450980392157,
            "acc_stderr": 0.03873958714149351,
            "acc_norm": 0.18627450980392157,
            "acc_norm_stderr": 0.03873958714149351
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3739495798319328,
            "acc_stderr": 0.031429466378837076,
            "acc_norm": 0.3739495798319328,
            "acc_norm_stderr": 0.031429466378837076
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.34615384615384615,
            "acc_stderr": 0.024121125416941173,
            "acc_norm": 0.34615384615384615,
            "acc_norm_stderr": 0.024121125416941173
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4537037037037037,
            "acc_stderr": 0.04812917324536823,
            "acc_norm": 0.4537037037037037,
            "acc_norm_stderr": 0.04812917324536823
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3251231527093596,
            "acc_stderr": 0.032957975663112704,
            "acc_norm": 0.3251231527093596,
            "acc_norm_stderr": 0.032957975663112704
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3774193548387097,
            "acc_stderr": 0.02757596072327824,
            "acc_norm": 0.3774193548387097,
            "acc_norm_stderr": 0.02757596072327824
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5726495726495726,
            "acc_stderr": 0.03240847393516326,
            "acc_norm": 0.5726495726495726,
            "acc_norm_stderr": 0.03240847393516326
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.37358490566037733,
            "acc_stderr": 0.02977308271331988,
            "acc_norm": 0.37358490566037733,
            "acc_norm_stderr": 0.02977308271331988
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4636363636363636,
            "acc_stderr": 0.04776449162396197,
            "acc_norm": 0.4636363636363636,
            "acc_norm_stderr": 0.04776449162396197
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.28888888888888886,
            "acc_stderr": 0.02763490726417854,
            "acc_norm": 0.28888888888888886,
            "acc_norm_stderr": 0.02763490726417854
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.037345356767871984,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.037345356767871984
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.4925373134328358,
            "acc_stderr": 0.03535140084276719,
            "acc_norm": 0.4925373134328358,
            "acc_norm_stderr": 0.03535140084276719
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.32947976878612717,
            "acc_stderr": 0.03583901754736412,
            "acc_norm": 0.32947976878612717,
            "acc_norm_stderr": 0.03583901754736412
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.26455026455026454,
            "acc_stderr": 0.022717467897708617,
            "acc_norm": 0.26455026455026454,
            "acc_norm_stderr": 0.022717467897708617
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3402777777777778,
            "acc_stderr": 0.03962135573486219,
            "acc_norm": 0.3402777777777778,
            "acc_norm_stderr": 0.03962135573486219
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.52,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4190751445086705,
            "acc_stderr": 0.026564178111422622,
            "acc_norm": 0.4190751445086705,
            "acc_norm_stderr": 0.026564178111422622
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4049079754601227,
            "acc_stderr": 0.03856672163548914,
            "acc_norm": 0.4049079754601227,
            "acc_norm_stderr": 0.03856672163548914
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.41358024691358025,
            "acc_stderr": 0.027402042040269955,
            "acc_norm": 0.41358024691358025,
            "acc_norm_stderr": 0.027402042040269955
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421296,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421296
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.43005181347150256,
            "acc_stderr": 0.03572954333144808,
            "acc_norm": 0.43005181347150256,
            "acc_norm_stderr": 0.03572954333144808
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.042270544512321984,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.042270544512321984
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3834862385321101,
            "acc_stderr": 0.020847156641915984,
            "acc_norm": 0.3834862385321101,
            "acc_norm_stderr": 0.020847156641915984
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2698412698412698,
            "acc_stderr": 0.03970158273235172,
            "acc_norm": 0.2698412698412698,
            "acc_norm_stderr": 0.03970158273235172
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3790849673202614,
            "acc_stderr": 0.027780141207023337,
            "acc_norm": 0.3790849673202614,
            "acc_norm_stderr": 0.027780141207023337
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5785123966942148,
            "acc_stderr": 0.04507732278775089,
            "acc_norm": 0.5785123966942148,
            "acc_norm_stderr": 0.04507732278775089
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3223684210526316,
            "acc_stderr": 0.038035102483515854,
            "acc_norm": 0.3223684210526316,
            "acc_norm_stderr": 0.038035102483515854
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3284313725490196,
            "acc_stderr": 0.01899970738316267,
            "acc_norm": 0.3284313725490196,
            "acc_norm_stderr": 0.01899970738316267
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.30851063829787234,
            "acc_stderr": 0.027553366165101362,
            "acc_norm": 0.30851063829787234,
            "acc_norm_stderr": 0.027553366165101362
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.04547960999764376,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.04547960999764376
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3472222222222222,
            "acc_stderr": 0.03246887243637648,
            "acc_norm": 0.3472222222222222,
            "acc_norm_stderr": 0.03246887243637648
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.264804469273743,
            "acc_stderr": 0.014756906483260657,
            "acc_norm": 0.264804469273743,
            "acc_norm_stderr": 0.014756906483260657
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.33455882352941174,
            "acc_stderr": 0.028661996202335314,
            "acc_norm": 0.33455882352941174,
            "acc_norm_stderr": 0.028661996202335314
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.43673469387755104,
            "acc_stderr": 0.031751952375833226,
            "acc_norm": 0.43673469387755104,
            "acc_norm_stderr": 0.031751952375833226
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4219409282700422,
            "acc_stderr": 0.032148146302403695,
            "acc_norm": 0.4219409282700422,
            "acc_norm_stderr": 0.032148146302403695
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2920469361147327,
            "acc_stderr": 0.011613349136271817,
            "acc_norm": 0.2920469361147327,
            "acc_norm_stderr": 0.011613349136271817
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4068627450980392,
            "acc_stderr": 0.03447891136353383,
            "acc_norm": 0.4068627450980392,
            "acc_norm_stderr": 0.03447891136353383
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4303030303030303,
            "acc_stderr": 0.03866225962879077,
            "acc_norm": 0.4303030303030303,
            "acc_norm_stderr": 0.03866225962879077
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3023255813953488,
            "mc1_stderr": 0.01607750926613303,
            "mc2": 0.4750714543386988,
            "mc2_stderr": 0.016159472828434183
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2727272727272727,
            "acc_stderr": 0.015311853110300352,
            "acc_norm": 0.34946871310507677,
            "acc_norm_stderr": 0.01639279708576985
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus",
        "model_sha": "1c97acb58f2a740d7994d1ea7b0c02c234bbde3a",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
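For reference, a minimal sketch of how these per-task scores might be loaded and summarized with standard-library Python. The local file name and the simple unweighted mean over the ko_mmlu subtasks are illustrative assumptions, not part of the evaluation output or the leaderboard's official aggregation.

```python
import json

# Hypothetical local path; adjust to wherever this result file is stored.
path = "result_2023-10-04 09:05:17.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Unweighted mean of acc_norm over the ko_mmlu subtasks (illustrative only).
mmlu_scores = [
    task["acc_norm"]
    for name, task in results.items()
    if name.startswith("harness|ko_mmlu_")
]

print(f"model:            {data['config_general']['model_name']}")
print(f"ko_mmlu subtasks: {len(mmlu_scores)}")
print(f"mean acc_norm:    {sum(mmlu_scores) / len(mmlu_scores):.4f}")
```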