results/igig98/ppo2/result_2023-10-29 13:20:18.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.2909556313993174,
"acc_stderr": 0.013273077865907573,
"acc_norm": 0.3447098976109215,
"acc_norm_stderr": 0.013888816286782112
},
"harness|ko_hellaswag|10": {
"acc": 0.3908583947420832,
"acc_stderr": 0.00486945515093382,
"acc_norm": 0.5073690499900418,
"acc_norm_stderr": 0.004989239462835228
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.29239766081871343,
"acc_stderr": 0.03488647713457923,
"acc_norm": 0.29239766081871343,
"acc_norm_stderr": 0.03488647713457923
},
"harness|ko_mmlu_management|5": {
"acc": 0.18446601941747573,
"acc_stderr": 0.03840423627288276,
"acc_norm": 0.18446601941747573,
"acc_norm_stderr": 0.03840423627288276
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.2554278416347382,
"acc_stderr": 0.015594955384455765,
"acc_norm": 0.2554278416347382,
"acc_norm_stderr": 0.015594955384455765
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34814814814814815,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.34814814814814815,
"acc_norm_stderr": 0.041153246103369526
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939098,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939098
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.225531914893617,
"acc_stderr": 0.02732107841738753,
"acc_norm": 0.225531914893617,
"acc_norm_stderr": 0.02732107841738753
},
"harness|ko_mmlu_virology|5": {
"acc": 0.18072289156626506,
"acc_stderr": 0.029955737855810138,
"acc_norm": 0.18072289156626506,
"acc_norm_stderr": 0.029955737855810138
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.31189710610932475,
"acc_stderr": 0.02631185807185416,
"acc_norm": 0.31189710610932475,
"acc_norm_stderr": 0.02631185807185416
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.16143497757847533,
"acc_stderr": 0.02469395789912846,
"acc_norm": 0.16143497757847533,
"acc_norm_stderr": 0.02469395789912846
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.25190839694656486,
"acc_stderr": 0.038073871163060866,
"acc_norm": 0.25190839694656486,
"acc_norm_stderr": 0.038073871163060866
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816505
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.2828282828282828,
"acc_stderr": 0.03208779558786751,
"acc_norm": 0.2828282828282828,
"acc_norm_stderr": 0.03208779558786751
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2827586206896552,
"acc_stderr": 0.03752833958003337,
"acc_norm": 0.2827586206896552,
"acc_norm_stderr": 0.03752833958003337
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.04158307533083286,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.04158307533083286
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.21428571428571427,
"acc_stderr": 0.026653531596715477,
"acc_norm": 0.21428571428571427,
"acc_norm_stderr": 0.026653531596715477
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2358974358974359,
"acc_stderr": 0.021525965407408726,
"acc_norm": 0.2358974358974359,
"acc_norm_stderr": 0.021525965407408726
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.25,
"acc_stderr": 0.04186091791394607,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04186091791394607
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.030108330718011625,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.030108330718011625
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.25806451612903225,
"acc_stderr": 0.024892469172462826,
"acc_norm": 0.25806451612903225,
"acc_norm_stderr": 0.024892469172462826
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.23504273504273504,
"acc_stderr": 0.027778835904935437,
"acc_norm": 0.23504273504273504,
"acc_norm_stderr": 0.027778835904935437
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.24528301886792453,
"acc_stderr": 0.0264803571798957,
"acc_norm": 0.24528301886792453,
"acc_norm_stderr": 0.0264803571798957
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.2,
"acc_stderr": 0.03831305140884601,
"acc_norm": 0.2,
"acc_norm_stderr": 0.03831305140884601
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2518518518518518,
"acc_stderr": 0.026466117538959912,
"acc_norm": 0.2518518518518518,
"acc_norm_stderr": 0.026466117538959912
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.038020397601079024,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.038020397601079024
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.208955223880597,
"acc_stderr": 0.028748298931728658,
"acc_norm": 0.208955223880597,
"acc_norm_stderr": 0.028748298931728658
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2658959537572254,
"acc_stderr": 0.033687629322594295,
"acc_norm": 0.2658959537572254,
"acc_norm_stderr": 0.033687629322594295
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.23544973544973544,
"acc_stderr": 0.021851509822031715,
"acc_norm": 0.23544973544973544,
"acc_norm_stderr": 0.021851509822031715
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2916666666666667,
"acc_stderr": 0.03800968060554858,
"acc_norm": 0.2916666666666667,
"acc_norm_stderr": 0.03800968060554858
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036846,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036846
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.23699421965317918,
"acc_stderr": 0.02289408248992599,
"acc_norm": 0.23699421965317918,
"acc_norm_stderr": 0.02289408248992599
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3067484662576687,
"acc_stderr": 0.03623089915724148,
"acc_norm": 0.3067484662576687,
"acc_norm_stderr": 0.03623089915724148
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.3117283950617284,
"acc_stderr": 0.025773111169630433,
"acc_norm": 0.3117283950617284,
"acc_norm_stderr": 0.025773111169630433
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.23316062176165803,
"acc_stderr": 0.03051611137147602,
"acc_norm": 0.23316062176165803,
"acc_norm_stderr": 0.03051611137147602
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.22807017543859648,
"acc_stderr": 0.03947152782669415,
"acc_norm": 0.22807017543859648,
"acc_norm_stderr": 0.03947152782669415
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.24220183486238533,
"acc_stderr": 0.01836817630659862,
"acc_norm": 0.24220183486238533,
"acc_norm_stderr": 0.01836817630659862
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.16666666666666666,
"acc_stderr": 0.03333333333333338,
"acc_norm": 0.16666666666666666,
"acc_norm_stderr": 0.03333333333333338
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.238562091503268,
"acc_stderr": 0.024404394928087873,
"acc_norm": 0.238562091503268,
"acc_norm_stderr": 0.024404394928087873
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.22,
"acc_stderr": 0.041633319989322674,
"acc_norm": 0.22,
"acc_norm_stderr": 0.041633319989322674
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.30578512396694213,
"acc_stderr": 0.04205953933884123,
"acc_norm": 0.30578512396694213,
"acc_norm_stderr": 0.04205953933884123
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3026315789473684,
"acc_stderr": 0.03738520676119667,
"acc_norm": 0.3026315789473684,
"acc_norm_stderr": 0.03738520676119667
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.28104575163398693,
"acc_stderr": 0.018185218954318082,
"acc_norm": 0.28104575163398693,
"acc_norm_stderr": 0.018185218954318082
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.23404255319148937,
"acc_stderr": 0.025257861359432407,
"acc_norm": 0.23404255319148937,
"acc_norm_stderr": 0.025257861359432407
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.21428571428571427,
"acc_stderr": 0.03894641120044793,
"acc_norm": 0.21428571428571427,
"acc_norm_stderr": 0.03894641120044793
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3611111111111111,
"acc_stderr": 0.03275773486100998,
"acc_norm": 0.3611111111111111,
"acc_norm_stderr": 0.03275773486100998
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.27150837988826815,
"acc_stderr": 0.014874252168095278,
"acc_norm": 0.27150837988826815,
"acc_norm_stderr": 0.014874252168095278
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.375,
"acc_stderr": 0.029408372932278746,
"acc_norm": 0.375,
"acc_norm_stderr": 0.029408372932278746
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.24897959183673468,
"acc_stderr": 0.027682979522960234,
"acc_norm": 0.24897959183673468,
"acc_norm_stderr": 0.027682979522960234
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.2869198312236287,
"acc_stderr": 0.029443773022594693,
"acc_norm": 0.2869198312236287,
"acc_norm_stderr": 0.029443773022594693
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.27053455019556716,
"acc_stderr": 0.011345996743539264,
"acc_norm": 0.27053455019556716,
"acc_norm_stderr": 0.011345996743539264
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.29901960784313725,
"acc_stderr": 0.03213325717373618,
"acc_norm": 0.29901960784313725,
"acc_norm_stderr": 0.03213325717373618
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.2606060606060606,
"acc_stderr": 0.03427743175816524,
"acc_norm": 0.2606060606060606,
"acc_norm_stderr": 0.03427743175816524
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2717258261933905,
"mc1_stderr": 0.015572840452875833,
"mc2": 0.4229362575464193,
"mc2_stderr": 0.015023014923371594
},
"harness|ko_commongen_v2|2": {
"acc": 0.3022432113341204,
"acc_stderr": 0.015788654863022375,
"acc_norm": 0.3447461629279811,
"acc_norm_stderr": 0.016340649905418697
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "igig98/ppo2",
"model_sha": "12b21b4455bffbaea9811bddb74ceb8cb6cc5f8c",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}