{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.36860068259385664,
"acc_stderr": 0.014097810678042194,
"acc_norm": 0.4249146757679181,
"acc_norm_stderr": 0.014445698968520769
},
"harness|ko_hellaswag|10": {
"acc": 0.40659231228838877,
"acc_stderr": 0.00490193651154613,
"acc_norm": 0.5416251742680741,
"acc_norm_stderr": 0.004972460206842306
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5146198830409356,
"acc_stderr": 0.038331852752130254,
"acc_norm": 0.5146198830409356,
"acc_norm_stderr": 0.038331852752130254
},
"harness|ko_mmlu_management|5": {
"acc": 0.49514563106796117,
"acc_stderr": 0.049505043821289195,
"acc_norm": 0.49514563106796117,
"acc_norm_stderr": 0.049505043821289195
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5070242656449553,
"acc_stderr": 0.017878199003432214,
"acc_norm": 0.5070242656449553,
"acc_norm_stderr": 0.017878199003432214
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.3851851851851852,
"acc_stderr": 0.042039210401562783,
"acc_norm": 0.3851851851851852,
"acc_norm_stderr": 0.042039210401562783
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.33617021276595743,
"acc_stderr": 0.030881618520676942,
"acc_norm": 0.33617021276595743,
"acc_norm_stderr": 0.030881618520676942
},
"harness|ko_mmlu_virology|5": {
"acc": 0.40963855421686746,
"acc_stderr": 0.038284011150790206,
"acc_norm": 0.40963855421686746,
"acc_norm_stderr": 0.038284011150790206
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4565916398713826,
"acc_stderr": 0.028290869054197598,
"acc_norm": 0.4565916398713826,
"acc_norm_stderr": 0.028290869054197598
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.37668161434977576,
"acc_stderr": 0.032521134899291884,
"acc_norm": 0.37668161434977576,
"acc_norm_stderr": 0.032521134899291884
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5114503816793893,
"acc_stderr": 0.043841400240780176,
"acc_norm": 0.5114503816793893,
"acc_norm_stderr": 0.043841400240780176
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621503,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621503
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.4696969696969697,
"acc_stderr": 0.03555804051763929,
"acc_norm": 0.4696969696969697,
"acc_norm_stderr": 0.03555804051763929
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3793103448275862,
"acc_stderr": 0.04043461861916747,
"acc_norm": 0.3793103448275862,
"acc_norm_stderr": 0.04043461861916747
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.27450980392156865,
"acc_stderr": 0.044405219061793275,
"acc_norm": 0.27450980392156865,
"acc_norm_stderr": 0.044405219061793275
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3907563025210084,
"acc_stderr": 0.031693802357129965,
"acc_norm": 0.3907563025210084,
"acc_norm_stderr": 0.031693802357129965
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.37948717948717947,
"acc_stderr": 0.024603626924097413,
"acc_norm": 0.37948717948717947,
"acc_norm_stderr": 0.024603626924097413
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.42592592592592593,
"acc_stderr": 0.0478034362693679,
"acc_norm": 0.42592592592592593,
"acc_norm_stderr": 0.0478034362693679
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3645320197044335,
"acc_stderr": 0.0338640574606209,
"acc_norm": 0.3645320197044335,
"acc_norm_stderr": 0.0338640574606209
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.4483870967741935,
"acc_stderr": 0.028292056830112735,
"acc_norm": 0.4483870967741935,
"acc_norm_stderr": 0.028292056830112735
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.594017094017094,
"acc_stderr": 0.03217180182641086,
"acc_norm": 0.594017094017094,
"acc_norm_stderr": 0.03217180182641086
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4339622641509434,
"acc_stderr": 0.0305032920133426,
"acc_norm": 0.4339622641509434,
"acc_norm_stderr": 0.0305032920133426
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.44545454545454544,
"acc_stderr": 0.047605488214603246,
"acc_norm": 0.44545454545454544,
"acc_norm_stderr": 0.047605488214603246
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.02794045713622841,
"acc_norm": 0.3,
"acc_norm_stderr": 0.02794045713622841
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2913907284768212,
"acc_stderr": 0.037101857261199946,
"acc_norm": 0.2913907284768212,
"acc_norm_stderr": 0.037101857261199946
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5522388059701493,
"acc_stderr": 0.03516184772952167,
"acc_norm": 0.5522388059701493,
"acc_norm_stderr": 0.03516184772952167
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3583815028901734,
"acc_stderr": 0.03656343653353159,
"acc_norm": 0.3583815028901734,
"acc_norm_stderr": 0.03656343653353159
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.31216931216931215,
"acc_stderr": 0.023865206836972592,
"acc_norm": 0.31216931216931215,
"acc_norm_stderr": 0.023865206836972592
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3194444444444444,
"acc_stderr": 0.03899073687357335,
"acc_norm": 0.3194444444444444,
"acc_norm_stderr": 0.03899073687357335
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.4277456647398844,
"acc_stderr": 0.026636539741116072,
"acc_norm": 0.4277456647398844,
"acc_norm_stderr": 0.026636539741116072
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3619631901840491,
"acc_stderr": 0.037757007291414416,
"acc_norm": 0.3619631901840491,
"acc_norm_stderr": 0.037757007291414416
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4660493827160494,
"acc_stderr": 0.02775653525734767,
"acc_norm": 0.4660493827160494,
"acc_norm_stderr": 0.02775653525734767
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.45595854922279794,
"acc_stderr": 0.03594413711272436,
"acc_norm": 0.45595854922279794,
"acc_norm_stderr": 0.03594413711272436
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.22807017543859648,
"acc_stderr": 0.03947152782669415,
"acc_norm": 0.22807017543859648,
"acc_norm_stderr": 0.03947152782669415
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.46972477064220186,
"acc_stderr": 0.021397988604936965,
"acc_norm": 0.46972477064220186,
"acc_norm_stderr": 0.021397988604936965
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.040406101782088394,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.040406101782088394
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.434640522875817,
"acc_stderr": 0.028384256704883034,
"acc_norm": 0.434640522875817,
"acc_norm_stderr": 0.028384256704883034
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5371900826446281,
"acc_stderr": 0.04551711196104218,
"acc_norm": 0.5371900826446281,
"acc_norm_stderr": 0.04551711196104218
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.45394736842105265,
"acc_stderr": 0.040516463428741406,
"acc_norm": 0.45394736842105265,
"acc_norm_stderr": 0.040516463428741406
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.31209150326797386,
"acc_stderr": 0.018745011201277657,
"acc_norm": 0.31209150326797386,
"acc_norm_stderr": 0.018745011201277657
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3120567375886525,
"acc_stderr": 0.027640120545169945,
"acc_norm": 0.3120567375886525,
"acc_norm_stderr": 0.027640120545169945
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.1875,
"acc_stderr": 0.0370468111477387,
"acc_norm": 0.1875,
"acc_norm_stderr": 0.0370468111477387
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3287037037037037,
"acc_stderr": 0.032036140846700596,
"acc_norm": 0.3287037037037037,
"acc_norm_stderr": 0.032036140846700596
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.22426470588235295,
"acc_stderr": 0.025336848563332338,
"acc_norm": 0.22426470588235295,
"acc_norm_stderr": 0.025336848563332338
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.4897959183673469,
"acc_stderr": 0.03200255347893782,
"acc_norm": 0.4897959183673469,
"acc_norm_stderr": 0.03200255347893782
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.48945147679324896,
"acc_stderr": 0.032539983791662855,
"acc_norm": 0.48945147679324896,
"acc_norm_stderr": 0.032539983791662855
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.29726205997392435,
"acc_stderr": 0.011673346173086048,
"acc_norm": 0.29726205997392435,
"acc_norm_stderr": 0.011673346173086048
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4166666666666667,
"acc_stderr": 0.034602283272391704,
"acc_norm": 0.4166666666666667,
"acc_norm_stderr": 0.034602283272391704
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.49696969696969695,
"acc_stderr": 0.03904272341431856,
"acc_norm": 0.49696969696969695,
"acc_norm_stderr": 0.03904272341431856
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.26805385556915545,
"mc1_stderr": 0.015506204722834559,
"mc2": 0.4313245637601363,
"mc2_stderr": 0.01494158153176466
},
"harness|ko_commongen_v2|2": {
"acc": 0.44510035419126326,
"acc_stderr": 0.017086417431005474,
"acc_norm": 0.5360094451003542,
"acc_norm_stderr": 0.017145715365486664
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Jenti-Kaeri/ko-llama2-13b-OrcaPlatypus",
"model_sha": "80952bf913ab217ee77ee0328f3c9e68cc1abf22",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}