{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3370307167235495, "acc_stderr": 0.01381347665290228, "acc_norm": 0.38054607508532423, "acc_norm_stderr": 0.014188277712349822 }, "harness|ko_hellaswag|10": { "acc": 0.3623780123481378, "acc_stderr": 0.004797048154893968, "acc_norm": 0.46883091017725553, "acc_norm_stderr": 0.00498007670739243 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4327485380116959, "acc_stderr": 0.03799978644370607, "acc_norm": 0.4327485380116959, "acc_norm_stderr": 0.03799978644370607 }, "harness|ko_mmlu_management|5": { "acc": 0.49514563106796117, "acc_stderr": 0.049505043821289195, "acc_norm": 0.49514563106796117, "acc_norm_stderr": 0.049505043821289195 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4878671775223499, "acc_stderr": 0.017874698667491355, "acc_norm": 0.4878671775223499, "acc_norm_stderr": 0.017874698667491355 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.042039210401562783 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3872340425531915, "acc_stderr": 0.03184389265339525, "acc_norm": 0.3872340425531915, "acc_norm_stderr": 0.03184389265339525 }, "harness|ko_mmlu_virology|5": { "acc": 0.3614457831325301, "acc_stderr": 0.0374005938202932, "acc_norm": 0.3614457831325301, "acc_norm_stderr": 0.0374005938202932 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4887459807073955, "acc_stderr": 0.028390897396863533, "acc_norm": 0.4887459807073955, "acc_norm_stderr": 0.028390897396863533 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4260089686098655, "acc_stderr": 0.0331883328621728, "acc_norm": 0.4260089686098655, "acc_norm_stderr": 0.0331883328621728 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4732824427480916, "acc_stderr": 0.04379024936553894, "acc_norm": 0.4732824427480916, "acc_norm_stderr": 0.04379024936553894 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.45454545454545453, "acc_stderr": 0.03547601494006937, "acc_norm": 0.45454545454545453, "acc_norm_stderr": 0.03547601494006937 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4068965517241379, "acc_stderr": 0.040937939812662374, "acc_norm": 0.4068965517241379, "acc_norm_stderr": 0.040937939812662374 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617749, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617749 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4495798319327731, "acc_stderr": 0.03231293497137707, "acc_norm": 0.4495798319327731, "acc_norm_stderr": 0.03231293497137707 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4256410256410256, "acc_stderr": 0.025069094387296546, "acc_norm": 0.4256410256410256, "acc_norm_stderr": 0.025069094387296546 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.44, "acc_stderr": 0.049888765156985884, "acc_norm": 0.44, "acc_norm_stderr": 0.049888765156985884 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.48148148148148145, "acc_stderr": 0.04830366024635331, "acc_norm": 0.48148148148148145, 
"acc_norm_stderr": 0.04830366024635331 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.35960591133004927, "acc_stderr": 0.03376458246509567, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.03376458246509567 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.47419354838709676, "acc_stderr": 0.02840609505765332, "acc_norm": 0.47419354838709676, "acc_norm_stderr": 0.02840609505765332 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6153846153846154, "acc_stderr": 0.03187195347942466, "acc_norm": 0.6153846153846154, "acc_norm_stderr": 0.03187195347942466 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4188679245283019, "acc_stderr": 0.030365050829115215, "acc_norm": 0.4188679245283019, "acc_norm_stderr": 0.030365050829115215 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.4818181818181818, "acc_stderr": 0.04785964010794917, "acc_norm": 0.4818181818181818, "acc_norm_stderr": 0.04785964010794917 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.028406533090608466, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.028406533090608466 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2847682119205298, "acc_stderr": 0.03684881521389023, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389023 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5522388059701493, "acc_stderr": 0.03516184772952166, "acc_norm": 0.5522388059701493, "acc_norm_stderr": 0.03516184772952166 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3468208092485549, "acc_stderr": 0.036291466701596636, "acc_norm": 0.3468208092485549, "acc_norm_stderr": 0.036291466701596636 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.30423280423280424, "acc_stderr": 0.023695415009463084, "acc_norm": 0.30423280423280424, "acc_norm_stderr": 0.023695415009463084 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3680555555555556, "acc_stderr": 0.04032999053960719, "acc_norm": 0.3680555555555556, "acc_norm_stderr": 0.04032999053960719 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.49421965317919075, "acc_stderr": 0.02691729617914911, "acc_norm": 0.49421965317919075, "acc_norm_stderr": 0.02691729617914911 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4049079754601227, "acc_stderr": 0.03856672163548913, "acc_norm": 0.4049079754601227, "acc_norm_stderr": 0.03856672163548913 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.42592592592592593, "acc_stderr": 0.027513747284379417, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.027513747284379417 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.533678756476684, "acc_stderr": 0.03600244069867178, "acc_norm": 0.533678756476684, "acc_norm_stderr": 0.03600244069867178 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489361, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489361 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.46788990825688076, "acc_stderr": 0.021393071222680804, "acc_norm": 0.46788990825688076, "acc_norm_stderr": 0.021393071222680804 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.40522875816993464, "acc_stderr": 0.028110928492809075, "acc_norm": 0.40522875816993464, "acc_norm_stderr": 0.028110928492809075 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5537190082644629, "acc_stderr": 0.04537935177947879, "acc_norm": 0.5537190082644629, "acc_norm_stderr": 0.04537935177947879 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4342105263157895, "acc_stderr": 0.04033565667848319, "acc_norm": 0.4342105263157895, "acc_norm_stderr": 0.04033565667848319 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.36437908496732024, "acc_stderr": 0.019469518221573702, "acc_norm": 0.36437908496732024, "acc_norm_stderr": 0.019469518221573702 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3120567375886525, "acc_stderr": 0.02764012054516993, "acc_norm": 0.3120567375886525, "acc_norm_stderr": 0.02764012054516993 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.25, "acc_stderr": 0.04109974682633932, "acc_norm": 0.25, "acc_norm_stderr": 0.04109974682633932 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3425925925925926, "acc_stderr": 0.03236585252602157, "acc_norm": 0.3425925925925926, "acc_norm_stderr": 0.03236585252602157 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.26927374301675977, "acc_stderr": 0.014835616582882601, "acc_norm": 0.26927374301675977, "acc_norm_stderr": 0.014835616582882601 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4117647058823529, "acc_stderr": 0.029896163033125468, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.029896163033125468 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5061224489795918, "acc_stderr": 0.03200682020163907, "acc_norm": 0.5061224489795918, "acc_norm_stderr": 0.03200682020163907 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5738396624472574, "acc_stderr": 0.03219035703131774, "acc_norm": 0.5738396624472574, "acc_norm_stderr": 0.03219035703131774 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.34615384615384615, "acc_stderr": 0.012150699768228568, "acc_norm": 0.34615384615384615, "acc_norm_stderr": 0.012150699768228568 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.44607843137254904, "acc_stderr": 0.03488845451304974, "acc_norm": 0.44607843137254904, "acc_norm_stderr": 0.03488845451304974 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4909090909090909, "acc_stderr": 0.03903698647748441, "acc_norm": 0.4909090909090909, "acc_norm_stderr": 0.03903698647748441 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2766217870257038, "mc1_stderr": 0.0156596057553269, "mc2": 0.44247428746712286, "mc2_stderr": 0.015350644205547385 }, "harness|ko_commongen_v2|2": { "acc": 0.3612750885478158, "acc_stderr": 0.016515463022412014, "acc_norm": 0.4049586776859504, "acc_norm_stderr": 0.016876941165045612 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "garage-bAInd/Platypus2-13B", "model_sha": "0a474bc0e76203528db789f027f4d6cce2727cce", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }