{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.32593856655290104,
"acc_stderr": 0.01369743246669324,
"acc_norm": 0.3779863481228669,
"acc_norm_stderr": 0.0141696645203031
},
"harness|ko_hellaswag|10": {
"acc": 0.377414857598088,
"acc_stderr": 0.0048374934398742984,
"acc_norm": 0.48755228042222665,
"acc_norm_stderr": 0.004988234881206747
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.03615507630310935,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03615507630310935
},
"harness|ko_mmlu_management|5": {
"acc": 0.17475728155339806,
"acc_stderr": 0.037601780060266196,
"acc_norm": 0.17475728155339806,
"acc_norm_stderr": 0.037601780060266196
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.3116219667943806,
"acc_stderr": 0.016562433867284176,
"acc_norm": 0.3116219667943806,
"acc_norm_stderr": 0.016562433867284176
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04072314811876837,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04072314811876837
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3148936170212766,
"acc_stderr": 0.030363582197238167,
"acc_norm": 0.3148936170212766,
"acc_norm_stderr": 0.030363582197238167
},
"harness|ko_mmlu_virology|5": {
"acc": 0.23493975903614459,
"acc_stderr": 0.03300533186128922,
"acc_norm": 0.23493975903614459,
"acc_norm_stderr": 0.03300533186128922
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.28938906752411575,
"acc_stderr": 0.025755865922632945,
"acc_norm": 0.28938906752411575,
"acc_norm_stderr": 0.025755865922632945
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.2825112107623318,
"acc_stderr": 0.03021683101150876,
"acc_norm": 0.2825112107623318,
"acc_norm_stderr": 0.03021683101150876
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.2748091603053435,
"acc_stderr": 0.039153454088478354,
"acc_norm": 0.2748091603053435,
"acc_norm_stderr": 0.039153454088478354
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.32323232323232326,
"acc_stderr": 0.033322999210706444,
"acc_norm": 0.32323232323232326,
"acc_norm_stderr": 0.033322999210706444
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.23448275862068965,
"acc_stderr": 0.035306258743465914,
"acc_norm": 0.23448275862068965,
"acc_norm_stderr": 0.035306258743465914
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.17647058823529413,
"acc_stderr": 0.0379328118530781,
"acc_norm": 0.17647058823529413,
"acc_norm_stderr": 0.0379328118530781
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.29411764705882354,
"acc_stderr": 0.029597329730978096,
"acc_norm": 0.29411764705882354,
"acc_norm_stderr": 0.029597329730978096
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.25384615384615383,
"acc_stderr": 0.022066054378726257,
"acc_norm": 0.25384615384615383,
"acc_norm_stderr": 0.022066054378726257
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.3,
"acc_stderr": 0.04605661864718381,
"acc_norm": 0.3,
"acc_norm_stderr": 0.04605661864718381
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.16,
"acc_stderr": 0.03684529491774709,
"acc_norm": 0.16,
"acc_norm_stderr": 0.03684529491774709
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.04133119440243839,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.04133119440243839
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2955665024630542,
"acc_stderr": 0.032104944337514575,
"acc_norm": 0.2955665024630542,
"acc_norm_stderr": 0.032104944337514575
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.2967741935483871,
"acc_stderr": 0.025988500792411894,
"acc_norm": 0.2967741935483871,
"acc_norm_stderr": 0.025988500792411894
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.32051282051282054,
"acc_stderr": 0.030572811310299607,
"acc_norm": 0.32051282051282054,
"acc_norm_stderr": 0.030572811310299607
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.26037735849056604,
"acc_stderr": 0.027008766090708087,
"acc_norm": 0.26037735849056604,
"acc_norm_stderr": 0.027008766090708087
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.23636363636363636,
"acc_stderr": 0.040693063197213775,
"acc_norm": 0.23636363636363636,
"acc_norm_stderr": 0.040693063197213775
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.23333333333333334,
"acc_stderr": 0.02578787422095932,
"acc_norm": 0.23333333333333334,
"acc_norm_stderr": 0.02578787422095932
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2980132450331126,
"acc_stderr": 0.037345356767871984,
"acc_norm": 0.2980132450331126,
"acc_norm_stderr": 0.037345356767871984
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.29850746268656714,
"acc_stderr": 0.032357437893550424,
"acc_norm": 0.29850746268656714,
"acc_norm_stderr": 0.032357437893550424
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.24855491329479767,
"acc_stderr": 0.03295304696818318,
"acc_norm": 0.24855491329479767,
"acc_norm_stderr": 0.03295304696818318
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2804232804232804,
"acc_stderr": 0.02313528797432563,
"acc_norm": 0.2804232804232804,
"acc_norm_stderr": 0.02313528797432563
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3055555555555556,
"acc_stderr": 0.03852084696008534,
"acc_norm": 0.3055555555555556,
"acc_norm_stderr": 0.03852084696008534
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816507,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816507
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.2658959537572254,
"acc_stderr": 0.023786203255508283,
"acc_norm": 0.2658959537572254,
"acc_norm_stderr": 0.023786203255508283
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.32515337423312884,
"acc_stderr": 0.03680350371286462,
"acc_norm": 0.32515337423312884,
"acc_norm_stderr": 0.03680350371286462
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.2808641975308642,
"acc_stderr": 0.02500646975579921,
"acc_norm": 0.2808641975308642,
"acc_norm_stderr": 0.02500646975579921
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.26424870466321243,
"acc_stderr": 0.03182155050916649,
"acc_norm": 0.26424870466321243,
"acc_norm_stderr": 0.03182155050916649
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2719298245614035,
"acc_stderr": 0.04185774424022056,
"acc_norm": 0.2719298245614035,
"acc_norm_stderr": 0.04185774424022056
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.28623853211009176,
"acc_stderr": 0.019379436628919968,
"acc_norm": 0.28623853211009176,
"acc_norm_stderr": 0.019379436628919968
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.21428571428571427,
"acc_stderr": 0.03670066451047181,
"acc_norm": 0.21428571428571427,
"acc_norm_stderr": 0.03670066451047181
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.2908496732026144,
"acc_stderr": 0.026004800363952113,
"acc_norm": 0.2908496732026144,
"acc_norm_stderr": 0.026004800363952113
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.36363636363636365,
"acc_stderr": 0.043913262867240704,
"acc_norm": 0.36363636363636365,
"acc_norm_stderr": 0.043913262867240704
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3092105263157895,
"acc_stderr": 0.037610708698674805,
"acc_norm": 0.3092105263157895,
"acc_norm_stderr": 0.037610708698674805
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.23366013071895425,
"acc_stderr": 0.017119158496044506,
"acc_norm": 0.23366013071895425,
"acc_norm_stderr": 0.017119158496044506
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2801418439716312,
"acc_stderr": 0.026789172351140245,
"acc_norm": 0.2801418439716312,
"acc_norm_stderr": 0.026789172351140245
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.26785714285714285,
"acc_stderr": 0.04203277291467761,
"acc_norm": 0.26785714285714285,
"acc_norm_stderr": 0.04203277291467761
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.2638888888888889,
"acc_stderr": 0.030058202704309846,
"acc_norm": 0.2638888888888889,
"acc_norm_stderr": 0.030058202704309846
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2569832402234637,
"acc_stderr": 0.014614465821966358,
"acc_norm": 0.2569832402234637,
"acc_norm_stderr": 0.014614465821966358
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252606,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252606
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3125,
"acc_stderr": 0.02815637344037142,
"acc_norm": 0.3125,
"acc_norm_stderr": 0.02815637344037142
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.4,
"acc_stderr": 0.031362502409358915,
"acc_norm": 0.4,
"acc_norm_stderr": 0.031362502409358915
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.2742616033755274,
"acc_stderr": 0.02904133351059804,
"acc_norm": 0.2742616033755274,
"acc_norm_stderr": 0.02904133351059804
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.27835723598435463,
"acc_stderr": 0.011446990197380984,
"acc_norm": 0.27835723598435463,
"acc_norm_stderr": 0.011446990197380984
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.25980392156862747,
"acc_stderr": 0.030778554678693264,
"acc_norm": 0.25980392156862747,
"acc_norm_stderr": 0.030778554678693264
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.30303030303030304,
"acc_stderr": 0.035886248000917075,
"acc_norm": 0.30303030303030304,
"acc_norm_stderr": 0.035886248000917075
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.25091799265605874,
"mc1_stderr": 0.01517698502770768,
"mc2": 0.41091136339297607,
"mc2_stderr": 0.014831976469805178
},
"harness|ko_commongen_v2|2": {
"acc": 0.3270365997638725,
"acc_stderr": 0.016129047485457022,
"acc_norm": 0.40968122786304606,
"acc_norm_stderr": 0.01690756819221947
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "DILAB-HYU/koquality-ko-ref-llama2-7b",
"model_sha": "3ef89d06e678a10cd678b2f0258d0f4a0ef2b5bb",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}