{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.39505119453924914,
"acc_stderr": 0.014285898292938169,
"acc_norm": 0.45051194539249145,
"acc_norm_stderr": 0.014539646098471625
},
"harness|ko_hellaswag|10": {
"acc": 0.4231228838876718,
"acc_stderr": 0.0049304485271466575,
"acc_norm": 0.5584544911372237,
"acc_norm_stderr": 0.004955564650016177
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5321637426900585,
"acc_stderr": 0.03826882417660369,
"acc_norm": 0.5321637426900585,
"acc_norm_stderr": 0.03826882417660369
},
"harness|ko_mmlu_management|5": {
"acc": 0.5145631067961165,
"acc_stderr": 0.04948637324026637,
"acc_norm": 0.5145631067961165,
"acc_norm_stderr": 0.04948637324026637
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.49936143039591313,
"acc_stderr": 0.017879948914431665,
"acc_norm": 0.49936143039591313,
"acc_norm_stderr": 0.017879948914431665
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.42962962962962964,
"acc_stderr": 0.04276349494376599,
"acc_norm": 0.42962962962962964,
"acc_norm_stderr": 0.04276349494376599
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3574468085106383,
"acc_stderr": 0.03132941789476425,
"acc_norm": 0.3574468085106383,
"acc_norm_stderr": 0.03132941789476425
},
"harness|ko_mmlu_virology|5": {
"acc": 0.42168674698795183,
"acc_stderr": 0.03844453181770917,
"acc_norm": 0.42168674698795183,
"acc_norm_stderr": 0.03844453181770917
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4694533762057878,
"acc_stderr": 0.02834504586484068,
"acc_norm": 0.4694533762057878,
"acc_norm_stderr": 0.02834504586484068
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.37668161434977576,
"acc_stderr": 0.03252113489929187,
"acc_norm": 0.37668161434977576,
"acc_norm_stderr": 0.03252113489929187
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.48854961832061067,
"acc_stderr": 0.043841400240780176,
"acc_norm": 0.48854961832061067,
"acc_norm_stderr": 0.043841400240780176
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5757575757575758,
"acc_stderr": 0.03521224908841586,
"acc_norm": 0.5757575757575758,
"acc_norm_stderr": 0.03521224908841586
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.43448275862068964,
"acc_stderr": 0.04130740879555498,
"acc_norm": 0.43448275862068964,
"acc_norm_stderr": 0.04130740879555498
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.042801058373643966,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.042801058373643966
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.4369747899159664,
"acc_stderr": 0.03221943636566196,
"acc_norm": 0.4369747899159664,
"acc_norm_stderr": 0.03221943636566196
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4076923076923077,
"acc_stderr": 0.02491524398598784,
"acc_norm": 0.4076923076923077,
"acc_norm_stderr": 0.02491524398598784
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956911,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956911
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5,
"acc_stderr": 0.04833682445228318,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04833682445228318
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.37438423645320196,
"acc_stderr": 0.03405155380561952,
"acc_norm": 0.37438423645320196,
"acc_norm_stderr": 0.03405155380561952
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.45806451612903226,
"acc_stderr": 0.028343787250540636,
"acc_norm": 0.45806451612903226,
"acc_norm_stderr": 0.028343787250540636
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6367521367521367,
"acc_stderr": 0.03150712523091264,
"acc_norm": 0.6367521367521367,
"acc_norm_stderr": 0.03150712523091264
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4339622641509434,
"acc_stderr": 0.03050329201334259,
"acc_norm": 0.4339622641509434,
"acc_norm_stderr": 0.03050329201334259
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.509090909090909,
"acc_stderr": 0.04788339768702861,
"acc_norm": 0.509090909090909,
"acc_norm_stderr": 0.04788339768702861
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2814814814814815,
"acc_stderr": 0.02742001935094527,
"acc_norm": 0.2814814814814815,
"acc_norm_stderr": 0.02742001935094527
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.03757949922943343,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.03757949922943343
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6019900497512438,
"acc_stderr": 0.034611994290400135,
"acc_norm": 0.6019900497512438,
"acc_norm_stderr": 0.034611994290400135
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.36416184971098264,
"acc_stderr": 0.03669072477416906,
"acc_norm": 0.36416184971098264,
"acc_norm_stderr": 0.03669072477416906
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3148148148148148,
"acc_stderr": 0.023919984164047736,
"acc_norm": 0.3148148148148148,
"acc_norm_stderr": 0.023919984164047736
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3611111111111111,
"acc_stderr": 0.040166600304512336,
"acc_norm": 0.3611111111111111,
"acc_norm_stderr": 0.040166600304512336
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.4393063583815029,
"acc_stderr": 0.026720034380514995,
"acc_norm": 0.4393063583815029,
"acc_norm_stderr": 0.026720034380514995
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4233128834355828,
"acc_stderr": 0.03881891213334383,
"acc_norm": 0.4233128834355828,
"acc_norm_stderr": 0.03881891213334383
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4567901234567901,
"acc_stderr": 0.027716661650194045,
"acc_norm": 0.4567901234567901,
"acc_norm_stderr": 0.027716661650194045
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.46113989637305697,
"acc_stderr": 0.03597524411734578,
"acc_norm": 0.46113989637305697,
"acc_norm_stderr": 0.03597524411734578
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2631578947368421,
"acc_stderr": 0.04142439719489361,
"acc_norm": 0.2631578947368421,
"acc_norm_stderr": 0.04142439719489361
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.4954128440366973,
"acc_stderr": 0.021436420955529424,
"acc_norm": 0.4954128440366973,
"acc_norm_stderr": 0.021436420955529424
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.30952380952380953,
"acc_stderr": 0.04134913018303316,
"acc_norm": 0.30952380952380953,
"acc_norm_stderr": 0.04134913018303316
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4411764705882353,
"acc_stderr": 0.028431095444176647,
"acc_norm": 0.4411764705882353,
"acc_norm_stderr": 0.028431095444176647
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5950413223140496,
"acc_stderr": 0.04481137755942469,
"acc_norm": 0.5950413223140496,
"acc_norm_stderr": 0.04481137755942469
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4276315789473684,
"acc_stderr": 0.04026097083296558,
"acc_norm": 0.4276315789473684,
"acc_norm_stderr": 0.04026097083296558
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3137254901960784,
"acc_stderr": 0.018771683893528186,
"acc_norm": 0.3137254901960784,
"acc_norm_stderr": 0.018771683893528186
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.028121636040639872,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.028121636040639872
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.23214285714285715,
"acc_stderr": 0.04007341809755806,
"acc_norm": 0.23214285714285715,
"acc_norm_stderr": 0.04007341809755806
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.03114144782353604,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.03114144782353604
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.28308823529411764,
"acc_stderr": 0.0273658611315138,
"acc_norm": 0.28308823529411764,
"acc_norm_stderr": 0.0273658611315138
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5061224489795918,
"acc_stderr": 0.032006820201639065,
"acc_norm": 0.5061224489795918,
"acc_norm_stderr": 0.032006820201639065
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5063291139240507,
"acc_stderr": 0.0325446201076786,
"acc_norm": 0.5063291139240507,
"acc_norm_stderr": 0.0325446201076786
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3089960886571056,
"acc_stderr": 0.011801729777239246,
"acc_norm": 0.3089960886571056,
"acc_norm_stderr": 0.011801729777239246
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.03454236585380609,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.03454236585380609
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5151515151515151,
"acc_stderr": 0.03902551007374448,
"acc_norm": 0.5151515151515151,
"acc_norm_stderr": 0.03902551007374448
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.29498164014687883,
"mc1_stderr": 0.015964400965589678,
"mc2": 0.46187837195291875,
"mc2_stderr": 0.015227305019069102
},
"harness|ko_commongen_v2|2": {
"acc": 0.5041322314049587,
"acc_stderr": 0.01718976703213082,
"acc_norm": 0.5572609208972845,
"acc_norm_stderr": 0.017077254131556224
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "jiwoochris/ko-llama2-v3",
"model_sha": "277462786fe73ea1b6f50d5e45ee1be5854611a1",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}