{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.35238907849829354, "acc_stderr": 0.013960142600598684, "acc_norm": 0.4121160409556314, "acc_norm_stderr": 0.0143839153022254 }, "harness|ko_hellaswag|10": { "acc": 0.4012148974307907, "acc_stderr": 0.004891426533390626, "acc_norm": 0.5355506871141207, "acc_norm_stderr": 0.0049771527464786015 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5029239766081871, "acc_stderr": 0.03834759370936839, "acc_norm": 0.5029239766081871, "acc_norm_stderr": 0.03834759370936839 }, "harness|ko_mmlu_management|5": { "acc": 0.5825242718446602, "acc_stderr": 0.048828405482122375, "acc_norm": 0.5825242718446602, "acc_norm_stderr": 0.048828405482122375 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5427841634738186, "acc_stderr": 0.017814385238534444, "acc_norm": 0.5427841634738186, "acc_norm_stderr": 0.017814385238534444 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.45925925925925926, "acc_stderr": 0.04304979692464244, "acc_norm": 0.45925925925925926, "acc_norm_stderr": 0.04304979692464244 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.40425531914893614, "acc_stderr": 0.03208115750788684, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.03208115750788684 }, "harness|ko_mmlu_virology|5": { "acc": 0.40963855421686746, "acc_stderr": 0.03828401115079022, "acc_norm": 0.40963855421686746, "acc_norm_stderr": 0.03828401115079022 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5176848874598071, "acc_stderr": 0.028380322849077138, "acc_norm": 0.5176848874598071, "acc_norm_stderr": 0.028380322849077138 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.48878923766816146, "acc_stderr": 0.033549366530984746, "acc_norm": 0.48878923766816146, "acc_norm_stderr": 0.033549366530984746 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5114503816793893, "acc_stderr": 0.043841400240780176, "acc_norm": 0.5114503816793893, "acc_norm_stderr": 0.043841400240780176 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.6262626262626263, "acc_stderr": 0.03446897738659332, "acc_norm": 0.6262626262626263, "acc_norm_stderr": 0.03446897738659332 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.5172413793103449, "acc_stderr": 0.04164188720169375, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.3137254901960784, "acc_stderr": 0.04617034827006717, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006717 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5042016806722689, "acc_stderr": 0.03247734334448111, "acc_norm": 0.5042016806722689, "acc_norm_stderr": 0.03247734334448111 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4794871794871795, "acc_stderr": 0.025329663163489943, "acc_norm": 0.4794871794871795, "acc_norm_stderr": 0.025329663163489943 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5, "acc_stderr": 0.04833682445228318, "acc_norm": 0.5, "acc_norm_stderr": 
0.04833682445228318 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4236453201970443, "acc_stderr": 0.034767257476490385, "acc_norm": 0.4236453201970443, "acc_norm_stderr": 0.034767257476490385 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5258064516129032, "acc_stderr": 0.028406095057653333, "acc_norm": 0.5258064516129032, "acc_norm_stderr": 0.028406095057653333 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7094017094017094, "acc_stderr": 0.029745048572674064, "acc_norm": 0.7094017094017094, "acc_norm_stderr": 0.029745048572674064 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5094339622641509, "acc_stderr": 0.0307673947078081, "acc_norm": 0.5094339622641509, "acc_norm_stderr": 0.0307673947078081 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5, "acc_stderr": 0.04789131426105757, "acc_norm": 0.5, "acc_norm_stderr": 0.04789131426105757 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3296296296296296, "acc_stderr": 0.028661201116524586, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.028661201116524586 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6467661691542289, "acc_stderr": 0.03379790611796777, "acc_norm": 0.6467661691542289, "acc_norm_stderr": 0.03379790611796777 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.4508670520231214, "acc_stderr": 0.0379401267469703, "acc_norm": 0.4508670520231214, "acc_norm_stderr": 0.0379401267469703 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.34656084656084657, "acc_stderr": 0.02450877752102842, "acc_norm": 0.34656084656084657, "acc_norm_stderr": 0.02450877752102842 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4444444444444444, "acc_stderr": 0.041553199555931467, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.041553199555931467 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.35, "acc_stderr": 0.04793724854411021, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411021 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5028901734104047, "acc_stderr": 0.02691864538323901, "acc_norm": 0.5028901734104047, "acc_norm_stderr": 0.02691864538323901 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4785276073619632, "acc_stderr": 0.0392474687675113, "acc_norm": 0.4785276073619632, "acc_norm_stderr": 0.0392474687675113 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4783950617283951, "acc_stderr": 0.027794760105008746, "acc_norm": 0.4783950617283951, "acc_norm_stderr": 0.027794760105008746 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.6062176165803109, "acc_stderr": 0.035260770955482405, "acc_norm": 0.6062176165803109, "acc_norm_stderr": 0.035260770955482405 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.34210526315789475, "acc_stderr": 0.044629175353369376, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.044629175353369376 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6293577981651376, "acc_stderr": 0.02070745816435298, "acc_norm": 0.6293577981651376, "acc_norm_stderr": 0.02070745816435298 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 
0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5294117647058824, "acc_stderr": 0.028580341065138293, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.028580341065138293 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_international_law|5": { "acc": 0.628099173553719, "acc_stderr": 0.044120158066245044, "acc_norm": 0.628099173553719, "acc_norm_stderr": 0.044120158066245044 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4868421052631579, "acc_stderr": 0.04067533136309173, "acc_norm": 0.4868421052631579, "acc_norm_stderr": 0.04067533136309173 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.4166666666666667, "acc_stderr": 0.019944914136873583, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.019944914136873583 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3404255319148936, "acc_stderr": 0.028267657482650147, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.028267657482650147 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.2767857142857143, "acc_stderr": 0.04246624336697623, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.04246624336697623 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.36574074074074076, "acc_stderr": 0.032847388576472056, "acc_norm": 0.36574074074074076, "acc_norm_stderr": 0.032847388576472056 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24134078212290502, "acc_stderr": 0.014310999547961443, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.014310999547961443 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956913, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956913 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4522058823529412, "acc_stderr": 0.030233758551596452, "acc_norm": 0.4522058823529412, "acc_norm_stderr": 0.030233758551596452 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5102040816326531, "acc_stderr": 0.03200255347893783, "acc_norm": 0.5102040816326531, "acc_norm_stderr": 0.03200255347893783 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6329113924050633, "acc_stderr": 0.031376240725616185, "acc_norm": 0.6329113924050633, "acc_norm_stderr": 0.031376240725616185 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.33572359843546284, "acc_stderr": 0.012061304157664618, "acc_norm": 0.33572359843546284, "acc_norm_stderr": 0.012061304157664618 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5343137254901961, "acc_stderr": 0.03501038327635897, "acc_norm": 0.5343137254901961, "acc_norm_stderr": 0.03501038327635897 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5757575757575758, "acc_stderr": 0.03859268142070264, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.03859268142070264 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2558139534883721, "mc1_stderr": 0.015274176219283338, "mc2": 0.40795074977785445, "mc2_stderr": 0.014747748547063478 }, "harness|ko_commongen_v2|2": { "acc": 0.5608028335301063, "acc_stderr": 0.017062775744780705, "acc_norm": 0.6210153482880756, "acc_norm_stderr": 0.016679260684229293 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, 
"harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "GAI-LLM/OPEN-SOLAR-KO-10.7B-mixed-v15-dedup", "model_sha": "108053f88e4632dabb22ba6313f0441deff840d0", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }