results/DBCMLAB/Llama-3-instruction-constructionsafety-layertuning/result_2024-05-24 06:09:39.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4257679180887372,
            "acc_stderr": 0.01444946427886881,
            "acc_norm": 0.4812286689419795,
            "acc_norm_stderr": 0.014601090150633964
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4213304122684724,
            "acc_stderr": 0.004927631806477561,
            "acc_norm": 0.5729934276040629,
            "acc_norm_stderr": 0.004936323537147931
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5847953216374269,
            "acc_stderr": 0.03779275945503201,
            "acc_norm": 0.5847953216374269,
            "acc_norm_stderr": 0.03779275945503201
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5728155339805825,
            "acc_stderr": 0.04897957737781168,
            "acc_norm": 0.5728155339805825,
            "acc_norm_stderr": 0.04897957737781168
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.611749680715198,
            "acc_stderr": 0.017427673295544326,
            "acc_norm": 0.611749680715198,
            "acc_norm_stderr": 0.017427673295544326
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4222222222222222,
            "acc_stderr": 0.042667634040995814,
            "acc_norm": 0.4222222222222222,
            "acc_norm_stderr": 0.042667634040995814
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4425531914893617,
            "acc_stderr": 0.032469569197899575,
            "acc_norm": 0.4425531914893617,
            "acc_norm_stderr": 0.032469569197899575
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.5,
            "acc_stderr": 0.03892494720807614,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.03892494720807614
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5594855305466238,
            "acc_stderr": 0.028196400574197422,
            "acc_norm": 0.5594855305466238,
            "acc_norm_stderr": 0.028196400574197422
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5246636771300448,
            "acc_stderr": 0.03351695167652628,
            "acc_norm": 0.5246636771300448,
            "acc_norm_stderr": 0.03351695167652628
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.549618320610687,
            "acc_stderr": 0.04363643698524779,
            "acc_norm": 0.549618320610687,
            "acc_norm_stderr": 0.04363643698524779
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956914,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956914
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6262626262626263,
            "acc_stderr": 0.034468977386593325,
            "acc_norm": 0.6262626262626263,
            "acc_norm_stderr": 0.034468977386593325
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4827586206896552,
            "acc_stderr": 0.04164188720169377,
            "acc_norm": 0.4827586206896552,
            "acc_norm_stderr": 0.04164188720169377
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.040233822736177476,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.040233822736177476
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5168067226890757,
            "acc_stderr": 0.03246013680375308,
            "acc_norm": 0.5168067226890757,
            "acc_norm_stderr": 0.03246013680375308
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.48205128205128206,
            "acc_stderr": 0.02533466708095496,
            "acc_norm": 0.48205128205128206,
            "acc_norm_stderr": 0.02533466708095496
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.6,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5740740740740741,
            "acc_stderr": 0.0478034362693679,
            "acc_norm": 0.5740740740740741,
            "acc_norm_stderr": 0.0478034362693679
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.37438423645320196,
            "acc_stderr": 0.03405155380561952,
            "acc_norm": 0.37438423645320196,
            "acc_norm_stderr": 0.03405155380561952
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5483870967741935,
            "acc_stderr": 0.02831050034856839,
            "acc_norm": 0.5483870967741935,
            "acc_norm_stderr": 0.02831050034856839
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7051282051282052,
            "acc_stderr": 0.029872577708891176,
            "acc_norm": 0.7051282051282052,
            "acc_norm_stderr": 0.029872577708891176
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4867924528301887,
            "acc_stderr": 0.030762134874500482,
            "acc_norm": 0.4867924528301887,
            "acc_norm_stderr": 0.030762134874500482
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4909090909090909,
            "acc_stderr": 0.04788339768702861,
            "acc_norm": 0.4909090909090909,
            "acc_norm_stderr": 0.04788339768702861
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3296296296296296,
            "acc_stderr": 0.028661201116524593,
            "acc_norm": 0.3296296296296296,
            "acc_norm_stderr": 0.028661201116524593
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3708609271523179,
            "acc_stderr": 0.03943966699183629,
            "acc_norm": 0.3708609271523179,
            "acc_norm_stderr": 0.03943966699183629
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6965174129353234,
            "acc_stderr": 0.03251006816458618,
            "acc_norm": 0.6965174129353234,
            "acc_norm_stderr": 0.03251006816458618
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4161849710982659,
            "acc_stderr": 0.037585177754049466,
            "acc_norm": 0.4161849710982659,
            "acc_norm_stderr": 0.037585177754049466
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.30158730158730157,
            "acc_stderr": 0.023636975996101813,
            "acc_norm": 0.30158730158730157,
            "acc_norm_stderr": 0.023636975996101813
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4583333333333333,
            "acc_stderr": 0.04166666666666665,
            "acc_norm": 0.4583333333333333,
            "acc_norm_stderr": 0.04166666666666665
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.7,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.7,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5202312138728323,
            "acc_stderr": 0.026897049996382875,
            "acc_norm": 0.5202312138728323,
            "acc_norm_stderr": 0.026897049996382875
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.50920245398773,
            "acc_stderr": 0.03927705600787443,
            "acc_norm": 0.50920245398773,
            "acc_norm_stderr": 0.03927705600787443
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5246913580246914,
            "acc_stderr": 0.027786800931427436,
            "acc_norm": 0.5246913580246914,
            "acc_norm_stderr": 0.027786800931427436
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.616580310880829,
            "acc_stderr": 0.03508984236295342,
            "acc_norm": 0.616580310880829,
            "acc_norm_stderr": 0.03508984236295342
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2894736842105263,
            "acc_stderr": 0.04266339443159394,
            "acc_norm": 0.2894736842105263,
            "acc_norm_stderr": 0.04266339443159394
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.634862385321101,
            "acc_stderr": 0.020642801454383995,
            "acc_norm": 0.634862385321101,
            "acc_norm_stderr": 0.020642801454383995
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.31746031746031744,
            "acc_stderr": 0.0416345303130286,
            "acc_norm": 0.31746031746031744,
            "acc_norm_stderr": 0.0416345303130286
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5294117647058824,
            "acc_stderr": 0.028580341065138286,
            "acc_norm": 0.5294117647058824,
            "acc_norm_stderr": 0.028580341065138286
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620333,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620333
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6446280991735537,
            "acc_stderr": 0.04369236326573981,
            "acc_norm": 0.6446280991735537,
            "acc_norm_stderr": 0.04369236326573981
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4473684210526316,
            "acc_stderr": 0.04046336883978252,
            "acc_norm": 0.4473684210526316,
            "acc_norm_stderr": 0.04046336883978252
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.020196594933541197,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.020196594933541197
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.33687943262411346,
            "acc_stderr": 0.02819553487396673,
            "acc_norm": 0.33687943262411346,
            "acc_norm_stderr": 0.02819553487396673
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4107142857142857,
            "acc_stderr": 0.04669510663875191,
            "acc_norm": 0.4107142857142857,
            "acc_norm_stderr": 0.04669510663875191
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4212962962962963,
            "acc_stderr": 0.03367462138896078,
            "acc_norm": 0.4212962962962963,
            "acc_norm_stderr": 0.03367462138896078
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2346368715083799,
            "acc_stderr": 0.014173044098303675,
            "acc_norm": 0.2346368715083799,
            "acc_norm_stderr": 0.014173044098303675
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.47794117647058826,
            "acc_stderr": 0.030343264224213528,
            "acc_norm": 0.47794117647058826,
            "acc_norm_stderr": 0.030343264224213528
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.49387755102040815,
            "acc_stderr": 0.03200682020163908,
            "acc_norm": 0.49387755102040815,
            "acc_norm_stderr": 0.03200682020163908
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6708860759493671,
            "acc_stderr": 0.03058732629470237,
            "acc_norm": 0.6708860759493671,
            "acc_norm_stderr": 0.03058732629470237
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.34419817470664926,
            "acc_stderr": 0.012134433741002575,
            "acc_norm": 0.34419817470664926,
            "acc_norm_stderr": 0.012134433741002575
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5931372549019608,
            "acc_stderr": 0.03447891136353383,
            "acc_norm": 0.5931372549019608,
            "acc_norm_stderr": 0.03447891136353383
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6606060606060606,
            "acc_stderr": 0.03697442205031595,
            "acc_norm": 0.6606060606060606,
            "acc_norm_stderr": 0.03697442205031595
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26805385556915545,
            "mc1_stderr": 0.01550620472283456,
            "mc2": 0.4196920864518041,
            "mc2_stderr": 0.015083569722000319
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4510035419126328,
            "acc_stderr": 0.017107618859549346,
            "acc_norm": 0.4817001180637544,
            "acc_norm_stderr": 0.01717883663917776
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "DBCMLAB/Llama-3-instruction-constructionsafety-layertuning",
        "model_sha": "cce37ef6a6ecf95e6995b7901dc53ea332732d1a",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}