results/DopeorNope/ZeroCoka-7B/result_2023-10-11 12:06:32.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.27986348122866894,
"acc_stderr": 0.013119040897725923,
"acc_norm": 0.3455631399317406,
"acc_norm_stderr": 0.013896938461145687
},
"harness|ko_hellaswag|10": {
"acc": 0.36566421031667,
"acc_stderr": 0.0048063163427093936,
"acc_norm": 0.48466440948018324,
"acc_norm_stderr": 0.004987433862274562
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.39766081871345027,
"acc_stderr": 0.0375363895576169,
"acc_norm": 0.39766081871345027,
"acc_norm_stderr": 0.0375363895576169
},
"harness|ko_mmlu_management|5": {
"acc": 0.32038834951456313,
"acc_stderr": 0.0462028408228004,
"acc_norm": 0.32038834951456313,
"acc_norm_stderr": 0.0462028408228004
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.41890166028097064,
"acc_stderr": 0.017643205052377185,
"acc_norm": 0.41890166028097064,
"acc_norm_stderr": 0.017643205052377185
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.362962962962963,
"acc_stderr": 0.041539484047424004,
"acc_norm": 0.362962962962963,
"acc_norm_stderr": 0.041539484047424004
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.28936170212765955,
"acc_stderr": 0.029644006577009618,
"acc_norm": 0.28936170212765955,
"acc_norm_stderr": 0.029644006577009618
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3132530120481928,
"acc_stderr": 0.036108050180310235,
"acc_norm": 0.3132530120481928,
"acc_norm_stderr": 0.036108050180310235
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.3858520900321543,
"acc_stderr": 0.027648149599751464,
"acc_norm": 0.3858520900321543,
"acc_norm_stderr": 0.027648149599751464
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.3721973094170404,
"acc_stderr": 0.03244305283008731,
"acc_norm": 0.3721973094170404,
"acc_norm_stderr": 0.03244305283008731
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.45038167938931295,
"acc_stderr": 0.04363643698524779,
"acc_norm": 0.45038167938931295,
"acc_norm_stderr": 0.04363643698524779
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.3838383838383838,
"acc_stderr": 0.03464881675016339,
"acc_norm": 0.3838383838383838,
"acc_norm_stderr": 0.03464881675016339
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4206896551724138,
"acc_stderr": 0.0411391498118926,
"acc_norm": 0.4206896551724138,
"acc_norm_stderr": 0.0411391498118926
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.20588235294117646,
"acc_stderr": 0.04023382273617746,
"acc_norm": 0.20588235294117646,
"acc_norm_stderr": 0.04023382273617746
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3865546218487395,
"acc_stderr": 0.0316314580755238,
"acc_norm": 0.3865546218487395,
"acc_norm_stderr": 0.0316314580755238
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.3076923076923077,
"acc_stderr": 0.02340092891831049,
"acc_norm": 0.3076923076923077,
"acc_norm_stderr": 0.02340092891831049
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.46296296296296297,
"acc_stderr": 0.04820403072760628,
"acc_norm": 0.46296296296296297,
"acc_norm_stderr": 0.04820403072760628
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2561576354679803,
"acc_stderr": 0.0307127300709826,
"acc_norm": 0.2561576354679803,
"acc_norm_stderr": 0.0307127300709826
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3709677419354839,
"acc_stderr": 0.02748054188795359,
"acc_norm": 0.3709677419354839,
"acc_norm_stderr": 0.02748054188795359
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.4829059829059829,
"acc_stderr": 0.032736940493481824,
"acc_norm": 0.4829059829059829,
"acc_norm_stderr": 0.032736940493481824
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.3471698113207547,
"acc_stderr": 0.02930010170554965,
"acc_norm": 0.3471698113207547,
"acc_norm_stderr": 0.02930010170554965
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.33636363636363636,
"acc_stderr": 0.04525393596302506,
"acc_norm": 0.33636363636363636,
"acc_norm_stderr": 0.04525393596302506
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.23333333333333334,
"acc_stderr": 0.02578787422095932,
"acc_norm": 0.23333333333333334,
"acc_norm_stderr": 0.02578787422095932
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2913907284768212,
"acc_stderr": 0.03710185726119996,
"acc_norm": 0.2913907284768212,
"acc_norm_stderr": 0.03710185726119996
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.4228855721393035,
"acc_stderr": 0.03493231777421282,
"acc_norm": 0.4228855721393035,
"acc_norm_stderr": 0.03493231777421282
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3063583815028902,
"acc_stderr": 0.03514942551267437,
"acc_norm": 0.3063583815028902,
"acc_norm_stderr": 0.03514942551267437
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.0220190800122179,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.0220190800122179
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3125,
"acc_stderr": 0.038760854559127644,
"acc_norm": 0.3125,
"acc_norm_stderr": 0.038760854559127644
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.38439306358381503,
"acc_stderr": 0.026189666966272035,
"acc_norm": 0.38439306358381503,
"acc_norm_stderr": 0.026189666966272035
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3067484662576687,
"acc_stderr": 0.03623089915724145,
"acc_norm": 0.3067484662576687,
"acc_norm_stderr": 0.03623089915724145
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.39197530864197533,
"acc_stderr": 0.027163686038271226,
"acc_norm": 0.39197530864197533,
"acc_norm_stderr": 0.027163686038271226
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.38860103626943004,
"acc_stderr": 0.03517739796373132,
"acc_norm": 0.38860103626943004,
"acc_norm_stderr": 0.03517739796373132
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2807017543859649,
"acc_stderr": 0.042270544512322,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.042270544512322
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.3779816513761468,
"acc_stderr": 0.02078918706672811,
"acc_norm": 0.3779816513761468,
"acc_norm_stderr": 0.02078918706672811
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.25396825396825395,
"acc_stderr": 0.03893259610604672,
"acc_norm": 0.25396825396825395,
"acc_norm_stderr": 0.03893259610604672
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.45098039215686275,
"acc_stderr": 0.028491993586171566,
"acc_norm": 0.45098039215686275,
"acc_norm_stderr": 0.028491993586171566
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5041322314049587,
"acc_stderr": 0.045641987674327526,
"acc_norm": 0.5041322314049587,
"acc_norm_stderr": 0.045641987674327526
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.32894736842105265,
"acc_stderr": 0.03823428969926605,
"acc_norm": 0.32894736842105265,
"acc_norm_stderr": 0.03823428969926605
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3104575163398693,
"acc_stderr": 0.018718067052623216,
"acc_norm": 0.3104575163398693,
"acc_norm_stderr": 0.018718067052623216
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.32269503546099293,
"acc_stderr": 0.027889139300534778,
"acc_norm": 0.32269503546099293,
"acc_norm_stderr": 0.027889139300534778
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.25,
"acc_stderr": 0.04109974682633932,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04109974682633932
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3425925925925926,
"acc_stderr": 0.03236585252602157,
"acc_norm": 0.3425925925925926,
"acc_norm_stderr": 0.03236585252602157
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.24804469273743016,
"acc_stderr": 0.014444157808261446,
"acc_norm": 0.24804469273743016,
"acc_norm_stderr": 0.014444157808261446
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.029896163033125474,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.029896163033125474
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.3142857142857143,
"acc_stderr": 0.029719329422417468,
"acc_norm": 0.3142857142857143,
"acc_norm_stderr": 0.029719329422417468
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.45147679324894513,
"acc_stderr": 0.0323936001739747,
"acc_norm": 0.45147679324894513,
"acc_norm_stderr": 0.0323936001739747
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3135593220338983,
"acc_stderr": 0.01184923429145932,
"acc_norm": 0.3135593220338983,
"acc_norm_stderr": 0.01184923429145932
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.36764705882352944,
"acc_stderr": 0.03384132045674118,
"acc_norm": 0.36764705882352944,
"acc_norm_stderr": 0.03384132045674118
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.3878787878787879,
"acc_stderr": 0.038049136539710114,
"acc_norm": 0.3878787878787879,
"acc_norm_stderr": 0.038049136539710114
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2386780905752754,
"mc1_stderr": 0.014922629695456411,
"mc2": 0.3826229918315052,
"mc2_stderr": 0.015120737226444851
},
"harness|ko_commongen_v2|2": {
"acc": 0.21251475796930341,
"acc_stderr": 0.014064703386174934,
"acc_norm": 0.29988193624557263,
"acc_norm_stderr": 0.015753447615429458
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "DopeorNope/ZeroCoka-7B",
"model_sha": "3025135b08f7d052531fcd8f6a4a5a97e4e25c76",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}