results/DopeorNope/COKAL_merged_test-v1-13B/result_2023-12-19 08:25:29.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.46075085324232085,
"acc_stderr": 0.014566303676636581,
"acc_norm": 0.514505119453925,
"acc_norm_stderr": 0.014605241081370053
},
"harness|ko_hellaswag|10": {
"acc": 0.4541923919537941,
"acc_stderr": 0.004968796800410414,
"acc_norm": 0.6054570802628958,
"acc_norm_stderr": 0.004877534215987093
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5614035087719298,
"acc_stderr": 0.038057975055904594,
"acc_norm": 0.5614035087719298,
"acc_norm_stderr": 0.038057975055904594
},
"harness|ko_mmlu_management|5": {
"acc": 0.5533980582524272,
"acc_stderr": 0.04922424153458934,
"acc_norm": 0.5533980582524272,
"acc_norm_stderr": 0.04922424153458934
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5670498084291188,
"acc_stderr": 0.017718469101513985,
"acc_norm": 0.5670498084291188,
"acc_norm_stderr": 0.017718469101513985
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4740740740740741,
"acc_stderr": 0.04313531696750574,
"acc_norm": 0.4740740740740741,
"acc_norm_stderr": 0.04313531696750574
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720683,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720683
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4127659574468085,
"acc_stderr": 0.03218471141400351,
"acc_norm": 0.4127659574468085,
"acc_norm_stderr": 0.03218471141400351
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4397590361445783,
"acc_stderr": 0.03864139923699122,
"acc_norm": 0.4397590361445783,
"acc_norm_stderr": 0.03864139923699122
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5337620578778135,
"acc_stderr": 0.028333277109562804,
"acc_norm": 0.5337620578778135,
"acc_norm_stderr": 0.028333277109562804
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5605381165919282,
"acc_stderr": 0.03331092511038179,
"acc_norm": 0.5605381165919282,
"acc_norm_stderr": 0.03331092511038179
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.4580152671755725,
"acc_stderr": 0.04369802690578756,
"acc_norm": 0.4580152671755725,
"acc_norm_stderr": 0.04369802690578756
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5909090909090909,
"acc_stderr": 0.03502975799413007,
"acc_norm": 0.5909090909090909,
"acc_norm_stderr": 0.03502975799413007
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.41379310344827586,
"acc_stderr": 0.04104269211806232,
"acc_norm": 0.41379310344827586,
"acc_norm_stderr": 0.04104269211806232
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.041583075330832865,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.041583075330832865
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5,
"acc_stderr": 0.032478490123081544,
"acc_norm": 0.5,
"acc_norm_stderr": 0.032478490123081544
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4641025641025641,
"acc_stderr": 0.02528558599001783,
"acc_norm": 0.4641025641025641,
"acc_norm_stderr": 0.02528558599001783
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5,
"acc_stderr": 0.04833682445228318,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04833682445228318
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.41379310344827586,
"acc_stderr": 0.03465304488406796,
"acc_norm": 0.41379310344827586,
"acc_norm_stderr": 0.03465304488406796
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.4870967741935484,
"acc_stderr": 0.028434533152681848,
"acc_norm": 0.4870967741935484,
"acc_norm_stderr": 0.028434533152681848
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6410256410256411,
"acc_stderr": 0.03142616993791924,
"acc_norm": 0.6410256410256411,
"acc_norm_stderr": 0.03142616993791924
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.44528301886792454,
"acc_stderr": 0.030588052974270655,
"acc_norm": 0.44528301886792454,
"acc_norm_stderr": 0.030588052974270655
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5545454545454546,
"acc_stderr": 0.047605488214603246,
"acc_norm": 0.5545454545454546,
"acc_norm_stderr": 0.047605488214603246
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.26296296296296295,
"acc_stderr": 0.02684205787383371,
"acc_norm": 0.26296296296296295,
"acc_norm_stderr": 0.02684205787383371
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2781456953642384,
"acc_stderr": 0.03658603262763743,
"acc_norm": 0.2781456953642384,
"acc_norm_stderr": 0.03658603262763743
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6069651741293532,
"acc_stderr": 0.0345368246603156,
"acc_norm": 0.6069651741293532,
"acc_norm_stderr": 0.0345368246603156
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3872832369942196,
"acc_stderr": 0.03714325906302064,
"acc_norm": 0.3872832369942196,
"acc_norm_stderr": 0.03714325906302064
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.30952380952380953,
"acc_stderr": 0.02380952380952385,
"acc_norm": 0.30952380952380953,
"acc_norm_stderr": 0.02380952380952385
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3680555555555556,
"acc_stderr": 0.040329990539607195,
"acc_norm": 0.3680555555555556,
"acc_norm_stderr": 0.040329990539607195
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.67,
"acc_stderr": 0.047258156262526066,
"acc_norm": 0.67,
"acc_norm_stderr": 0.047258156262526066
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5491329479768786,
"acc_stderr": 0.026788811931562767,
"acc_norm": 0.5491329479768786,
"acc_norm_stderr": 0.026788811931562767
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5460122699386503,
"acc_stderr": 0.0391170190467718,
"acc_norm": 0.5460122699386503,
"acc_norm_stderr": 0.0391170190467718
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4845679012345679,
"acc_stderr": 0.0278074900442762,
"acc_norm": 0.4845679012345679,
"acc_norm_stderr": 0.0278074900442762
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5803108808290155,
"acc_stderr": 0.035615873276858834,
"acc_norm": 0.5803108808290155,
"acc_norm_stderr": 0.035615873276858834
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2719298245614035,
"acc_stderr": 0.04185774424022058,
"acc_norm": 0.2719298245614035,
"acc_norm_stderr": 0.04185774424022058
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5834862385321101,
"acc_stderr": 0.021136376504030874,
"acc_norm": 0.5834862385321101,
"acc_norm_stderr": 0.021136376504030874
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.24603174603174602,
"acc_stderr": 0.03852273364924318,
"acc_norm": 0.24603174603174602,
"acc_norm_stderr": 0.03852273364924318
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.027914055510467998,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.027914055510467998
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.43,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.43,
"acc_norm_stderr": 0.04975698519562428
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6611570247933884,
"acc_stderr": 0.04320767807536669,
"acc_norm": 0.6611570247933884,
"acc_norm_stderr": 0.04320767807536669
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3815789473684211,
"acc_stderr": 0.03953173377749194,
"acc_norm": 0.3815789473684211,
"acc_norm_stderr": 0.03953173377749194
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.39705882352941174,
"acc_stderr": 0.019794488900024106,
"acc_norm": 0.39705882352941174,
"acc_norm_stderr": 0.019794488900024106
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3404255319148936,
"acc_stderr": 0.02826765748265014,
"acc_norm": 0.3404255319148936,
"acc_norm_stderr": 0.02826765748265014
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.26785714285714285,
"acc_stderr": 0.04203277291467762,
"acc_norm": 0.26785714285714285,
"acc_norm_stderr": 0.04203277291467762
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.33796296296296297,
"acc_stderr": 0.03225941352631295,
"acc_norm": 0.33796296296296297,
"acc_norm_stderr": 0.03225941352631295
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.45,
"acc_stderr": 0.05,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4264705882352941,
"acc_stderr": 0.030042615832714847,
"acc_norm": 0.4264705882352941,
"acc_norm_stderr": 0.030042615832714847
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.4857142857142857,
"acc_stderr": 0.03199615232806287,
"acc_norm": 0.4857142857142857,
"acc_norm_stderr": 0.03199615232806287
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6497890295358649,
"acc_stderr": 0.03105239193758435,
"acc_norm": 0.6497890295358649,
"acc_norm_stderr": 0.03105239193758435
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.35528031290743156,
"acc_stderr": 0.012223623364044043,
"acc_norm": 0.35528031290743156,
"acc_norm_stderr": 0.012223623364044043
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5490196078431373,
"acc_stderr": 0.034924061041636124,
"acc_norm": 0.5490196078431373,
"acc_norm_stderr": 0.034924061041636124
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6,
"acc_stderr": 0.038254602783800266,
"acc_norm": 0.6,
"acc_norm_stderr": 0.038254602783800266
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.3292533659730722,
"mc1_stderr": 0.01645126444006824,
"mc2": 0.4904973367131087,
"mc2_stderr": 0.015682971125946653
},
"harness|ko_commongen_v2|2": {
"acc": 0.5407319952774499,
"acc_stderr": 0.01713321827653767,
"acc_norm": 0.577331759149941,
"acc_norm_stderr": 0.016983506079577604
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "DopeorNope/COKAL_merged_test-v1-13B",
"model_sha": "4164e460dbf37491becf4f987dedaa0628cadbdd",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}