{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.2790102389078498,
"acc_stderr": 0.013106784883601346,
"acc_norm": 0.32764505119453924,
"acc_norm_stderr": 0.013715847940719344
},
"harness|ko_hellaswag|10": {
"acc": 0.386476797450707,
"acc_stderr": 0.004859467984155259,
"acc_norm": 0.4987054371639116,
"acc_norm_stderr": 0.00498976468673883
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.2982456140350877,
"acc_stderr": 0.03508771929824565,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.03508771929824565
},
"harness|ko_mmlu_management|5": {
"acc": 0.18446601941747573,
"acc_stderr": 0.03840423627288276,
"acc_norm": 0.18446601941747573,
"acc_norm_stderr": 0.03840423627288276
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.24776500638569604,
"acc_stderr": 0.015438083080568961,
"acc_norm": 0.24776500638569604,
"acc_norm_stderr": 0.015438083080568961
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.18518518518518517,
"acc_stderr": 0.03355677216313141,
"acc_norm": 0.18518518518518517,
"acc_norm_stderr": 0.03355677216313141
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816506
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.28936170212765955,
"acc_stderr": 0.02964400657700962,
"acc_norm": 0.28936170212765955,
"acc_norm_stderr": 0.02964400657700962
},
"harness|ko_mmlu_virology|5": {
"acc": 0.25301204819277107,
"acc_stderr": 0.03384429155233136,
"acc_norm": 0.25301204819277107,
"acc_norm_stderr": 0.03384429155233136
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.2958199356913183,
"acc_stderr": 0.025922371788818784,
"acc_norm": 0.2958199356913183,
"acc_norm_stderr": 0.025922371788818784
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.21076233183856502,
"acc_stderr": 0.027373095500540193,
"acc_norm": 0.21076233183856502,
"acc_norm_stderr": 0.027373095500540193
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.2824427480916031,
"acc_stderr": 0.03948406125768361,
"acc_norm": 0.2824427480916031,
"acc_norm_stderr": 0.03948406125768361
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.25252525252525254,
"acc_stderr": 0.030954055470365907,
"acc_norm": 0.25252525252525254,
"acc_norm_stderr": 0.030954055470365907
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2482758620689655,
"acc_stderr": 0.03600105692727772,
"acc_norm": 0.2482758620689655,
"acc_norm_stderr": 0.03600105692727772
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04690650298201942,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04690650298201942
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.25210084033613445,
"acc_stderr": 0.028205545033277726,
"acc_norm": 0.25210084033613445,
"acc_norm_stderr": 0.028205545033277726
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.02242127361292371,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.02242127361292371
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816505
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.042365112580946336,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.042365112580946336
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2315270935960591,
"acc_stderr": 0.02967833314144444,
"acc_norm": 0.2315270935960591,
"acc_norm_stderr": 0.02967833314144444
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.1967741935483871,
"acc_stderr": 0.02261640942074203,
"acc_norm": 0.1967741935483871,
"acc_norm_stderr": 0.02261640942074203
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.28205128205128205,
"acc_stderr": 0.02948036054954119,
"acc_norm": 0.28205128205128205,
"acc_norm_stderr": 0.02948036054954119
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.25660377358490566,
"acc_stderr": 0.026880647889051982,
"acc_norm": 0.25660377358490566,
"acc_norm_stderr": 0.026880647889051982
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.23636363636363636,
"acc_stderr": 0.04069306319721375,
"acc_norm": 0.23636363636363636,
"acc_norm_stderr": 0.04069306319721375
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.23703703703703705,
"acc_stderr": 0.025928876132766104,
"acc_norm": 0.23703703703703705,
"acc_norm_stderr": 0.025928876132766104
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2582781456953642,
"acc_stderr": 0.035737053147634576,
"acc_norm": 0.2582781456953642,
"acc_norm_stderr": 0.035737053147634576
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.21890547263681592,
"acc_stderr": 0.029239174636647,
"acc_norm": 0.21890547263681592,
"acc_norm_stderr": 0.029239174636647
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.23699421965317918,
"acc_stderr": 0.03242414757483098,
"acc_norm": 0.23699421965317918,
"acc_norm_stderr": 0.03242414757483098
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2619047619047619,
"acc_stderr": 0.02264421261552521,
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.02264421261552521
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2361111111111111,
"acc_stderr": 0.03551446610810826,
"acc_norm": 0.2361111111111111,
"acc_norm_stderr": 0.03551446610810826
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.2,
"acc_stderr": 0.040201512610368445,
"acc_norm": 0.2,
"acc_norm_stderr": 0.040201512610368445
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.24566473988439305,
"acc_stderr": 0.023176298203992005,
"acc_norm": 0.24566473988439305,
"acc_norm_stderr": 0.023176298203992005
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.32515337423312884,
"acc_stderr": 0.036803503712864616,
"acc_norm": 0.32515337423312884,
"acc_norm_stderr": 0.036803503712864616
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.25617283950617287,
"acc_stderr": 0.024288533637726095,
"acc_norm": 0.25617283950617287,
"acc_norm_stderr": 0.024288533637726095
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.29533678756476683,
"acc_stderr": 0.03292296639155139,
"acc_norm": 0.29533678756476683,
"acc_norm_stderr": 0.03292296639155139
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2631578947368421,
"acc_stderr": 0.04142439719489362,
"acc_norm": 0.2631578947368421,
"acc_norm_stderr": 0.04142439719489362
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.25137614678899084,
"acc_stderr": 0.018599206360287415,
"acc_norm": 0.25137614678899084,
"acc_norm_stderr": 0.018599206360287415
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.20634920634920634,
"acc_stderr": 0.036196045241242515,
"acc_norm": 0.20634920634920634,
"acc_norm_stderr": 0.036196045241242515
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.23529411764705882,
"acc_stderr": 0.024288619466046102,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.024288619466046102
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.19,
"acc_stderr": 0.03942772444036623,
"acc_norm": 0.19,
"acc_norm_stderr": 0.03942772444036623
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.18181818181818182,
"acc_stderr": 0.035208939510976534,
"acc_norm": 0.18181818181818182,
"acc_norm_stderr": 0.035208939510976534
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.17763157894736842,
"acc_stderr": 0.03110318238312338,
"acc_norm": 0.17763157894736842,
"acc_norm_stderr": 0.03110318238312338
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.017401816711427657,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.017401816711427657
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.23049645390070922,
"acc_stderr": 0.025123739226872405,
"acc_norm": 0.23049645390070922,
"acc_norm_stderr": 0.025123739226872405
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.33035714285714285,
"acc_stderr": 0.04464285714285713,
"acc_norm": 0.33035714285714285,
"acc_norm_stderr": 0.04464285714285713
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4074074074074074,
"acc_stderr": 0.03350991604696043,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.03350991604696043
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.23687150837988827,
"acc_stderr": 0.014219570788103987,
"acc_norm": 0.23687150837988827,
"acc_norm_stderr": 0.014219570788103987
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.1875,
"acc_stderr": 0.023709788253811766,
"acc_norm": 0.1875,
"acc_norm_stderr": 0.023709788253811766
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.3142857142857143,
"acc_stderr": 0.02971932942241748,
"acc_norm": 0.3142857142857143,
"acc_norm_stderr": 0.02971932942241748
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.25316455696202533,
"acc_stderr": 0.028304657943035293,
"acc_norm": 0.25316455696202533,
"acc_norm_stderr": 0.028304657943035293
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2522816166883963,
"acc_stderr": 0.011092789056875232,
"acc_norm": 0.2522816166883963,
"acc_norm_stderr": 0.011092789056875232
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.25980392156862747,
"acc_stderr": 0.03077855467869326,
"acc_norm": 0.25980392156862747,
"acc_norm_stderr": 0.03077855467869326
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.28484848484848485,
"acc_stderr": 0.03524390844511783,
"acc_norm": 0.28484848484848485,
"acc_norm_stderr": 0.03524390844511783
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.24112607099143207,
"mc1_stderr": 0.01497482727975233,
"mc2": 0.39040412705496613,
"mc2_stderr": 0.01471780652709213
},
"harness|ko_commongen_v2|2": {
"acc": 0.5950704225352113,
"acc_stderr": 0.016827095223977993,
"acc_norm": 0.67018779342723,
"acc_norm_stderr": 0.016116355523395683
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "nlpai-lab/kullm-polyglot-12.8b-v2",
"model_sha": "9e0c9be881f663ca088b10faad15b54ea3ba779c",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}