{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.4087030716723549, "acc_stderr": 0.014365750345427006, "acc_norm": 0.4564846416382253, "acc_norm_stderr": 0.01455594976049644 }, "harness|ko_hellaswag|10": { "acc": 0.43168691495717987, "acc_stderr": 0.0049429906231311166, "acc_norm": 0.5795658235411273, "acc_norm_stderr": 0.0049261984839487115 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5321637426900585, "acc_stderr": 0.038268824176603704, "acc_norm": 0.5321637426900585, "acc_norm_stderr": 0.038268824176603704 }, "harness|ko_mmlu_management|5": { "acc": 0.49514563106796117, "acc_stderr": 0.049505043821289195, "acc_norm": 0.49514563106796117, "acc_norm_stderr": 0.049505043821289195 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5555555555555556, "acc_stderr": 0.017769250583533246, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.017769250583533246 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4074074074074074, "acc_stderr": 0.0424463323835323, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.0424463323835323 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4, "acc_stderr": 0.03202563076101735, "acc_norm": 0.4, "acc_norm_stderr": 0.03202563076101735 }, "harness|ko_mmlu_virology|5": { "acc": 0.4457831325301205, "acc_stderr": 0.038695433234721015, "acc_norm": 0.4457831325301205, "acc_norm_stderr": 0.038695433234721015 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4855305466237942, "acc_stderr": 0.028386198084177673, "acc_norm": 0.4855305466237942, "acc_norm_stderr": 0.028386198084177673 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5022421524663677, "acc_stderr": 0.033557465352232634, "acc_norm": 0.5022421524663677, "acc_norm_stderr": 0.033557465352232634 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4732824427480916, "acc_stderr": 0.04379024936553894, "acc_norm": 0.4732824427480916, "acc_norm_stderr": 0.04379024936553894 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5909090909090909, "acc_stderr": 0.035029757994130085, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.035029757994130085 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3724137931034483, "acc_stderr": 0.0402873153294756, "acc_norm": 0.3724137931034483, "acc_norm_stderr": 0.0402873153294756 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.04158307533083286, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.04158307533083286 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.453781512605042, "acc_stderr": 0.03233943468182088, "acc_norm": 0.453781512605042, "acc_norm_stderr": 0.03233943468182088 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.45897435897435895, "acc_stderr": 0.025265525491284295, "acc_norm": 0.45897435897435895, "acc_norm_stderr": 0.025265525491284295 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5462962962962963, "acc_stderr": 0.048129173245368216, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 
0.048129173245368216 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.35467980295566504, "acc_stderr": 0.03366124489051449, "acc_norm": 0.35467980295566504, "acc_norm_stderr": 0.03366124489051449 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.45806451612903226, "acc_stderr": 0.028343787250540636, "acc_norm": 0.45806451612903226, "acc_norm_stderr": 0.028343787250540636 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6495726495726496, "acc_stderr": 0.0312561082442188, "acc_norm": 0.6495726495726496, "acc_norm_stderr": 0.0312561082442188 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4490566037735849, "acc_stderr": 0.030612730713641092, "acc_norm": 0.4490566037735849, "acc_norm_stderr": 0.030612730713641092 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2518518518518518, "acc_stderr": 0.026466117538959916, "acc_norm": 0.2518518518518518, "acc_norm_stderr": 0.026466117538959916 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5671641791044776, "acc_stderr": 0.03503490923673281, "acc_norm": 0.5671641791044776, "acc_norm_stderr": 0.03503490923673281 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.4161849710982659, "acc_stderr": 0.037585177754049466, "acc_norm": 0.4161849710982659, "acc_norm_stderr": 0.037585177754049466 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.30423280423280424, "acc_stderr": 0.023695415009463087, "acc_norm": 0.30423280423280424, "acc_norm_stderr": 0.023695415009463087 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3958333333333333, "acc_stderr": 0.04089465449325583, "acc_norm": 0.3958333333333333, "acc_norm_stderr": 0.04089465449325583 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.49710982658959535, "acc_stderr": 0.02691864538323901, "acc_norm": 0.49710982658959535, "acc_norm_stderr": 0.02691864538323901 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5766871165644172, "acc_stderr": 0.03881891213334383, "acc_norm": 0.5766871165644172, "acc_norm_stderr": 0.03881891213334383 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.49691358024691357, "acc_stderr": 0.027820214158594377, "acc_norm": 0.49691358024691357, "acc_norm_stderr": 0.027820214158594377 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5440414507772021, "acc_stderr": 0.03594413711272436, "acc_norm": 0.5440414507772021, "acc_norm_stderr": 0.03594413711272436 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.21052631578947367, "acc_stderr": 0.03835153954399419, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.03835153954399419 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5871559633027523, "acc_stderr": 0.02110912813341391, "acc_norm": 0.5871559633027523, "acc_norm_stderr": 0.02110912813341391 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.3333333333333333, 
"acc_stderr": 0.04216370213557835, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04216370213557835 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4215686274509804, "acc_stderr": 0.02827549015679143, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.02827549015679143 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5950413223140496, "acc_stderr": 0.04481137755942469, "acc_norm": 0.5950413223140496, "acc_norm_stderr": 0.04481137755942469 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.375, "acc_stderr": 0.039397364351956274, "acc_norm": 0.375, "acc_norm_stderr": 0.039397364351956274 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.42810457516339867, "acc_stderr": 0.0200176292142131, "acc_norm": 0.42810457516339867, "acc_norm_stderr": 0.0200176292142131 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3404255319148936, "acc_stderr": 0.028267657482650144, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.028267657482650144 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.2767857142857143, "acc_stderr": 0.04246624336697625, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.04246624336697625 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.33796296296296297, "acc_stderr": 0.03225941352631295, "acc_norm": 0.33796296296296297, "acc_norm_stderr": 0.03225941352631295 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.01435591196476786, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.01435591196476786 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.38235294117647056, "acc_stderr": 0.02952009569768775, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.02952009569768775 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5020408163265306, "acc_stderr": 0.0320089533497105, "acc_norm": 0.5020408163265306, "acc_norm_stderr": 0.0320089533497105 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6751054852320675, "acc_stderr": 0.030486039389105303, "acc_norm": 0.6751054852320675, "acc_norm_stderr": 0.030486039389105303 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3741851368970013, "acc_stderr": 0.012359335618172063, "acc_norm": 0.3741851368970013, "acc_norm_stderr": 0.012359335618172063 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5294117647058824, "acc_stderr": 0.03503235296367993, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.03503235296367993 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5575757575757576, "acc_stderr": 0.03878372113711275, "acc_norm": 0.5575757575757576, "acc_norm_stderr": 0.03878372113711275 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.3243574051407589, "mc1_stderr": 0.01638797677964794, "mc2": 0.4753344144954286, "mc2_stderr": 0.015470233894001158 }, "harness|ko_commongen_v2|2": { "acc": 0.500590318772137, "acc_stderr": 0.01719034212344859, "acc_norm": 0.5726092089728453, "acc_norm_stderr": 0.017008129844823156 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, 
"harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v4-13B", "model_sha": "fabf605d23d96e548908ffe9f0ad49dae01c46f8", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }