{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3728668941979522, "acc_stderr": 0.01413117676013117, "acc_norm": 0.41467576791808874, "acc_norm_stderr": 0.014397070564409174 }, "harness|ko_hellaswag|10": { "acc": 0.36496713802031466, "acc_stderr": 0.004804370563856228, "acc_norm": 0.45717984465245964, "acc_norm_stderr": 0.004971449552787176 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4619883040935672, "acc_stderr": 0.03823727092882307, "acc_norm": 0.4619883040935672, "acc_norm_stderr": 0.03823727092882307 }, "harness|ko_mmlu_management|5": { "acc": 0.6504854368932039, "acc_stderr": 0.04721188506097172, "acc_norm": 0.6504854368932039, "acc_norm_stderr": 0.04721188506097172 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4904214559386973, "acc_stderr": 0.017876682275340887, "acc_norm": 0.4904214559386973, "acc_norm_stderr": 0.017876682275340887 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4, "acc_stderr": 0.04232073695151589, "acc_norm": 0.4, "acc_norm_stderr": 0.04232073695151589 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421255, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421255 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4808510638297872, "acc_stderr": 0.03266204299064678, "acc_norm": 0.4808510638297872, "acc_norm_stderr": 0.03266204299064678 }, "harness|ko_mmlu_virology|5": { "acc": 0.3614457831325301, "acc_stderr": 0.0374005938202932, "acc_norm": 0.3614457831325301, "acc_norm_stderr": 0.0374005938202932 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5048231511254019, "acc_stderr": 0.02839677044411129, "acc_norm": 0.5048231511254019, "acc_norm_stderr": 0.02839677044411129 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.45739910313901344, "acc_stderr": 0.033435777055830646, "acc_norm": 0.45739910313901344, "acc_norm_stderr": 0.033435777055830646 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.48091603053435117, "acc_stderr": 0.04382094705550989, "acc_norm": 0.48091603053435117, "acc_norm_stderr": 0.04382094705550989 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5858585858585859, "acc_stderr": 0.03509438348879629, "acc_norm": 0.5858585858585859, "acc_norm_stderr": 0.03509438348879629 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4482758620689655, "acc_stderr": 0.04144311810878151, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.04144311810878151 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.045766654032077636, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.045766654032077636 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5042016806722689, "acc_stderr": 0.03247734334448111, "acc_norm": 0.5042016806722689, "acc_norm_stderr": 0.03247734334448111 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.43846153846153846, "acc_stderr": 0.025158266016868554, "acc_norm": 0.43846153846153846, "acc_norm_stderr": 0.025158266016868554 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5648148148148148, "acc_stderr": 0.04792898170907062, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 
0.04792898170907062 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.41379310344827586, "acc_stderr": 0.03465304488406796, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.03465304488406796 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.46774193548387094, "acc_stderr": 0.028384747788813332, "acc_norm": 0.46774193548387094, "acc_norm_stderr": 0.028384747788813332 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7307692307692307, "acc_stderr": 0.029058588303748845, "acc_norm": 0.7307692307692307, "acc_norm_stderr": 0.029058588303748845 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.46037735849056605, "acc_stderr": 0.03067609659938918, "acc_norm": 0.46037735849056605, "acc_norm_stderr": 0.03067609659938918 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.4909090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.4909090909090909, "acc_norm_stderr": 0.04788339768702861 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.028897748741131143, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.028897748741131143 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2980132450331126, "acc_stderr": 0.03734535676787198, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.03734535676787198 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6318407960199005, "acc_stderr": 0.03410410565495302, "acc_norm": 0.6318407960199005, "acc_norm_stderr": 0.03410410565495302 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.41040462427745666, "acc_stderr": 0.03750757044895538, "acc_norm": 0.41040462427745666, "acc_norm_stderr": 0.03750757044895538 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.4497354497354497, "acc_stderr": 0.02562085704293665, "acc_norm": 0.4497354497354497, "acc_norm_stderr": 0.02562085704293665 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3541666666666667, "acc_stderr": 0.039994111357535424, "acc_norm": 0.3541666666666667, "acc_norm_stderr": 0.039994111357535424 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5086705202312138, "acc_stderr": 0.026915047355369804, "acc_norm": 0.5086705202312138, "acc_norm_stderr": 0.026915047355369804 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5276073619631901, "acc_stderr": 0.03922378290610991, "acc_norm": 0.5276073619631901, "acc_norm_stderr": 0.03922378290610991 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4722222222222222, "acc_stderr": 0.027777777777777804, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.027777777777777804 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939098, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939098 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5440414507772021, "acc_stderr": 0.03594413711272437, "acc_norm": 0.5440414507772021, "acc_norm_stderr": 0.03594413711272437 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.32456140350877194, "acc_stderr": 0.044045561573747685, "acc_norm": 0.32456140350877194, "acc_norm_stderr": 0.044045561573747685 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5266055045871559, "acc_stderr": 0.021406952688151574, "acc_norm": 0.5266055045871559, "acc_norm_stderr": 0.021406952688151574 }, "harness|ko_mmlu_formal_logic|5": { 
"acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.434640522875817, "acc_stderr": 0.028384256704883037, "acc_norm": 0.434640522875817, "acc_norm_stderr": 0.028384256704883037 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6776859504132231, "acc_stderr": 0.042664163633521664, "acc_norm": 0.6776859504132231, "acc_norm_stderr": 0.042664163633521664 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4473684210526316, "acc_stderr": 0.040463368839782514, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.040463368839782514 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3758169934640523, "acc_stderr": 0.019594021136577447, "acc_norm": 0.3758169934640523, "acc_norm_stderr": 0.019594021136577447 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.34397163120567376, "acc_stderr": 0.02833801742861132, "acc_norm": 0.34397163120567376, "acc_norm_stderr": 0.02833801742861132 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.39285714285714285, "acc_stderr": 0.04635550135609976, "acc_norm": 0.39285714285714285, "acc_norm_stderr": 0.04635550135609976 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4398148148148148, "acc_stderr": 0.033851779760448106, "acc_norm": 0.4398148148148148, "acc_norm_stderr": 0.033851779760448106 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2581005586592179, "acc_stderr": 0.014635185616527824, "acc_norm": 0.2581005586592179, "acc_norm_stderr": 0.014635185616527824 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.63, "acc_stderr": 0.04852365870939098, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939098 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3125, "acc_stderr": 0.02815637344037142, "acc_norm": 0.3125, "acc_norm_stderr": 0.02815637344037142 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.563265306122449, "acc_stderr": 0.03175195237583323, "acc_norm": 0.563265306122449, "acc_norm_stderr": 0.03175195237583323 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5443037974683544, "acc_stderr": 0.032419206846933335, "acc_norm": 0.5443037974683544, "acc_norm_stderr": 0.032419206846933335 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.34419817470664926, "acc_stderr": 0.012134433741002574, "acc_norm": 0.34419817470664926, "acc_norm_stderr": 0.012134433741002574 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4411764705882353, "acc_stderr": 0.034849415144292316, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.034849415144292316 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5212121212121212, "acc_stderr": 0.03900828913737302, "acc_norm": 0.5212121212121212, "acc_norm_stderr": 0.03900828913737302 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2913096695226438, "mc1_stderr": 0.01590598704818483, "mc2": 0.46212381405853503, "mc2_stderr": 0.01568383395016852 }, "harness|ko_commongen_v2|2": { "acc": 0.4923258559622196, "acc_stderr": 0.01718832921965428, "acc_norm": 0.5360094451003542, "acc_norm_stderr": 0.017145715365486664 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, 
"harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "OpenBuddy/openbuddy-llemma-34b-v13.2", "model_sha": "1abff297a8eae622c0f106854f2a8fbfcfa9c119", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }