{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3822525597269625, "acc_stderr": 0.014200454049979274, "acc_norm": 0.45051194539249145, "acc_norm_stderr": 0.014539646098471625 }, "harness|ko_hellaswag|10": { "acc": 0.41575383389762993, "acc_stderr": 0.004918442328872009, "acc_norm": 0.5518820952001593, "acc_norm_stderr": 0.0049628462061255 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5672514619883041, "acc_stderr": 0.03799978644370606, "acc_norm": 0.5672514619883041, "acc_norm_stderr": 0.03799978644370606 }, "harness|ko_mmlu_management|5": { "acc": 0.49514563106796117, "acc_stderr": 0.049505043821289195, "acc_norm": 0.49514563106796117, "acc_norm_stderr": 0.049505043821289195 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5363984674329502, "acc_stderr": 0.017832524079593265, "acc_norm": 0.5363984674329502, "acc_norm_stderr": 0.017832524079593265 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.37037037037037035, "acc_stderr": 0.04171654161354544, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.04171654161354544 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.31063829787234043, "acc_stderr": 0.03025123757921317, "acc_norm": 0.31063829787234043, "acc_norm_stderr": 0.03025123757921317 }, "harness|ko_mmlu_virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.03836722176598053, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.03836722176598053 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5048231511254019, "acc_stderr": 0.02839677044411129, "acc_norm": 0.5048231511254019, "acc_norm_stderr": 0.02839677044411129 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.45739910313901344, "acc_stderr": 0.033435777055830646, "acc_norm": 0.45739910313901344, "acc_norm_stderr": 0.033435777055830646 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5419847328244275, "acc_stderr": 0.04369802690578756, "acc_norm": 0.5419847328244275, "acc_norm_stderr": 0.04369802690578756 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.42, "acc_stderr": 0.04960449637488583, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488583 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.48484848484848486, "acc_stderr": 0.03560716516531061, "acc_norm": 0.48484848484848486, "acc_norm_stderr": 0.03560716516531061 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4, "acc_stderr": 0.04082482904638629, "acc_norm": 0.4, "acc_norm_stderr": 0.04082482904638629 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.0379328118530781, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.0379328118530781 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4957983193277311, "acc_stderr": 0.03247734334448111, "acc_norm": 0.4957983193277311, "acc_norm_stderr": 0.03247734334448111 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4205128205128205, "acc_stderr": 0.025028610276710855, "acc_norm": 0.4205128205128205, "acc_norm_stderr": 0.025028610276710855 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4722222222222222, "acc_stderr": 0.048262172941398944, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 
0.048262172941398944 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3497536945812808, "acc_stderr": 0.03355400904969565, "acc_norm": 0.3497536945812808, "acc_norm_stderr": 0.03355400904969565 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.43870967741935485, "acc_stderr": 0.028229497320317227, "acc_norm": 0.43870967741935485, "acc_norm_stderr": 0.028229497320317227 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6538461538461539, "acc_stderr": 0.0311669573672359, "acc_norm": 0.6538461538461539, "acc_norm_stderr": 0.0311669573672359 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.44150943396226416, "acc_stderr": 0.030561590426731833, "acc_norm": 0.44150943396226416, "acc_norm_stderr": 0.030561590426731833 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.41818181818181815, "acc_stderr": 0.04724577405731572, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.04724577405731572 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.24074074074074073, "acc_stderr": 0.0260671592222758, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.0260671592222758 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436774, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436774 }, "harness|ko_mmlu_sociology|5": { "acc": 0.572139303482587, "acc_stderr": 0.03498541988407795, "acc_norm": 0.572139303482587, "acc_norm_stderr": 0.03498541988407795 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3699421965317919, "acc_stderr": 0.03681229633394319, "acc_norm": 0.3699421965317919, "acc_norm_stderr": 0.03681229633394319 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2857142857142857, "acc_stderr": 0.02326651221373057, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.02326651221373057 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4166666666666667, "acc_stderr": 0.04122728707651282, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.04122728707651282 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5028901734104047, "acc_stderr": 0.026918645383239015, "acc_norm": 0.5028901734104047, "acc_norm_stderr": 0.026918645383239015 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4539877300613497, "acc_stderr": 0.0391170190467718, "acc_norm": 0.4539877300613497, "acc_norm_stderr": 0.0391170190467718 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4691358024691358, "acc_stderr": 0.027767689606833925, "acc_norm": 0.4691358024691358, "acc_norm_stderr": 0.027767689606833925 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5595854922279793, "acc_stderr": 0.035827245300360945, "acc_norm": 0.5595854922279793, "acc_norm_stderr": 0.035827245300360945 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5541284403669725, "acc_stderr": 0.02131133500970858, "acc_norm": 0.5541284403669725, "acc_norm_stderr": 0.02131133500970858 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.2777777777777778, "acc_stderr": 0.040061680838488774, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.040061680838488774 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.45751633986928103, "acc_stderr": 0.02852638345214264, "acc_norm": 0.45751633986928103, "acc_norm_stderr": 0.02852638345214264 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6942148760330579, "acc_stderr": 0.04205953933884124, "acc_norm": 0.6942148760330579, "acc_norm_stderr": 0.04205953933884124 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.45394736842105265, "acc_stderr": 0.04051646342874141, "acc_norm": 0.45394736842105265, "acc_norm_stderr": 0.04051646342874141 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.38562091503267976, "acc_stderr": 0.019691459052354143, "acc_norm": 0.38562091503267976, "acc_norm_stderr": 0.019691459052354143 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3475177304964539, "acc_stderr": 0.028406627809590954, "acc_norm": 0.3475177304964539, "acc_norm_stderr": 0.028406627809590954 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340455, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340455 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.26851851851851855, "acc_stderr": 0.03022522616001242, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.03022522616001242 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.29720670391061454, "acc_stderr": 0.015285313353641599, "acc_norm": 0.29720670391061454, "acc_norm_stderr": 0.015285313353641599 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.2867647058823529, "acc_stderr": 0.02747227447323382, "acc_norm": 0.2867647058823529, "acc_norm_stderr": 0.02747227447323382 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4775510204081633, "acc_stderr": 0.031976941187136725, "acc_norm": 0.4775510204081633, "acc_norm_stderr": 0.031976941187136725 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6075949367088608, "acc_stderr": 0.0317847187456473, "acc_norm": 0.6075949367088608, "acc_norm_stderr": 0.0317847187456473 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3318122555410691, "acc_stderr": 0.012026088259897628, "acc_norm": 0.3318122555410691, "acc_norm_stderr": 0.012026088259897628 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5049019607843137, "acc_stderr": 0.035091433756067866, "acc_norm": 0.5049019607843137, "acc_norm_stderr": 0.035091433756067866 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5454545454545454, "acc_stderr": 0.038881769216741004, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.038881769216741004 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.27906976744186046, "mc1_stderr": 0.01570210709062788, "mc2": 0.4471086568861838, "mc2_stderr": 0.015281241232491133 }, "harness|ko_commongen_v2|2": { "acc": 0.5466351829988194, "acc_stderr": 0.01711541822522687, "acc_norm": 0.602125147579693, "acc_norm_stderr": 0.016827959054733395 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.2", "model_sha": "b8290fa1d56a9ff58d2fecf3f8edd7058eb85502", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }