{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.44368600682593856,
"acc_stderr": 0.014518421825670447,
"acc_norm": 0.4906143344709898,
"acc_norm_stderr": 0.014608816322065
},
"harness|ko_hellaswag|10": {
"acc": 0.4576777534355706,
"acc_stderr": 0.0049718741597776965,
"acc_norm": 0.6170085640310695,
"acc_norm_stderr": 0.0048512275270708935
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6900584795321637,
"acc_stderr": 0.035469769593931624,
"acc_norm": 0.6900584795321637,
"acc_norm_stderr": 0.035469769593931624
},
"harness|ko_mmlu_management|5": {
"acc": 0.6990291262135923,
"acc_stderr": 0.045416094465039476,
"acc_norm": 0.6990291262135923,
"acc_norm_stderr": 0.045416094465039476
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.70242656449553,
"acc_stderr": 0.016349111912909435,
"acc_norm": 0.70242656449553,
"acc_norm_stderr": 0.016349111912909435
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.042925967182569816,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.042925967182569816
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4553191489361702,
"acc_stderr": 0.03255525359340355,
"acc_norm": 0.4553191489361702,
"acc_norm_stderr": 0.03255525359340355
},
"harness|ko_mmlu_virology|5": {
"acc": 0.5120481927710844,
"acc_stderr": 0.038913644958358175,
"acc_norm": 0.5120481927710844,
"acc_norm_stderr": 0.038913644958358175
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.6012861736334405,
"acc_stderr": 0.0278093225857745,
"acc_norm": 0.6012861736334405,
"acc_norm_stderr": 0.0278093225857745
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5964125560538116,
"acc_stderr": 0.032928028193303135,
"acc_norm": 0.5964125560538116,
"acc_norm_stderr": 0.032928028193303135
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5419847328244275,
"acc_stderr": 0.04369802690578757,
"acc_norm": 0.5419847328244275,
"acc_norm_stderr": 0.04369802690578757
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956913,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956913
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.696969696969697,
"acc_stderr": 0.032742879140268674,
"acc_norm": 0.696969696969697,
"acc_norm_stderr": 0.032742879140268674
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.47586206896551725,
"acc_stderr": 0.041618085035015295,
"acc_norm": 0.47586206896551725,
"acc_norm_stderr": 0.041618085035015295
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.04488482852329017,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.04488482852329017
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5756302521008403,
"acc_stderr": 0.03210479051015776,
"acc_norm": 0.5756302521008403,
"acc_norm_stderr": 0.03210479051015776
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5384615384615384,
"acc_stderr": 0.025275892070240648,
"acc_norm": 0.5384615384615384,
"acc_norm_stderr": 0.025275892070240648
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.58,
"acc_stderr": 0.04960449637488583,
"acc_norm": 0.58,
"acc_norm_stderr": 0.04960449637488583
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6944444444444444,
"acc_stderr": 0.044531975073749834,
"acc_norm": 0.6944444444444444,
"acc_norm_stderr": 0.044531975073749834
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4088669950738916,
"acc_stderr": 0.034590588158832314,
"acc_norm": 0.4088669950738916,
"acc_norm_stderr": 0.034590588158832314
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.6064516129032258,
"acc_stderr": 0.027791878753132267,
"acc_norm": 0.6064516129032258,
"acc_norm_stderr": 0.027791878753132267
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7692307692307693,
"acc_stderr": 0.02760192138141762,
"acc_norm": 0.7692307692307693,
"acc_norm_stderr": 0.02760192138141762
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5547169811320755,
"acc_stderr": 0.030588052974270658,
"acc_norm": 0.5547169811320755,
"acc_norm_stderr": 0.030588052974270658
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5272727272727272,
"acc_stderr": 0.04782001791380061,
"acc_norm": 0.5272727272727272,
"acc_norm_stderr": 0.04782001791380061
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.32592592592592595,
"acc_stderr": 0.028578348365473072,
"acc_norm": 0.32592592592592595,
"acc_norm_stderr": 0.028578348365473072
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2980132450331126,
"acc_stderr": 0.03734535676787198,
"acc_norm": 0.2980132450331126,
"acc_norm_stderr": 0.03734535676787198
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6567164179104478,
"acc_stderr": 0.03357379665433431,
"acc_norm": 0.6567164179104478,
"acc_norm_stderr": 0.03357379665433431
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.48554913294797686,
"acc_stderr": 0.03810871630454764,
"acc_norm": 0.48554913294797686,
"acc_norm_stderr": 0.03810871630454764
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.024278568024307695,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.024278568024307695
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4583333333333333,
"acc_stderr": 0.04166666666666665,
"acc_norm": 0.4583333333333333,
"acc_norm_stderr": 0.04166666666666665
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.81,
"acc_stderr": 0.03942772444036623,
"acc_norm": 0.81,
"acc_norm_stderr": 0.03942772444036623
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5838150289017341,
"acc_stderr": 0.026538189104705477,
"acc_norm": 0.5838150289017341,
"acc_norm_stderr": 0.026538189104705477
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5337423312883436,
"acc_stderr": 0.039194155450484096,
"acc_norm": 0.5337423312883436,
"acc_norm_stderr": 0.039194155450484096
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5895061728395061,
"acc_stderr": 0.027371350925124768,
"acc_norm": 0.5895061728395061,
"acc_norm_stderr": 0.027371350925124768
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.6994818652849741,
"acc_stderr": 0.0330881859441575,
"acc_norm": 0.6994818652849741,
"acc_norm_stderr": 0.0330881859441575
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.32456140350877194,
"acc_stderr": 0.04404556157374768,
"acc_norm": 0.32456140350877194,
"acc_norm_stderr": 0.04404556157374768
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6256880733944954,
"acc_stderr": 0.020748959408988327,
"acc_norm": 0.6256880733944954,
"acc_norm_stderr": 0.020748959408988327
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.31746031746031744,
"acc_stderr": 0.04163453031302859,
"acc_norm": 0.31746031746031744,
"acc_norm_stderr": 0.04163453031302859
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5849673202614379,
"acc_stderr": 0.028213504177824093,
"acc_norm": 0.5849673202614379,
"acc_norm_stderr": 0.028213504177824093
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620333
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6446280991735537,
"acc_stderr": 0.04369236326573981,
"acc_norm": 0.6446280991735537,
"acc_norm_stderr": 0.04369236326573981
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.6052631578947368,
"acc_stderr": 0.039777499346220734,
"acc_norm": 0.6052631578947368,
"acc_norm_stderr": 0.039777499346220734
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.4852941176470588,
"acc_stderr": 0.020219083895133924,
"acc_norm": 0.4852941176470588,
"acc_norm_stderr": 0.020219083895133924
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.36879432624113473,
"acc_stderr": 0.028782227561347247,
"acc_norm": 0.36879432624113473,
"acc_norm_stderr": 0.028782227561347247
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.4017857142857143,
"acc_stderr": 0.04653333146973646,
"acc_norm": 0.4017857142857143,
"acc_norm_stderr": 0.04653333146973646
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4583333333333333,
"acc_stderr": 0.03398110890294636,
"acc_norm": 0.4583333333333333,
"acc_norm_stderr": 0.03398110890294636
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574894,
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574894
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.65,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.65,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4889705882352941,
"acc_stderr": 0.030365446477275675,
"acc_norm": 0.4889705882352941,
"acc_norm_stderr": 0.030365446477275675
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.6448979591836734,
"acc_stderr": 0.030635655150387634,
"acc_norm": 0.6448979591836734,
"acc_norm_stderr": 0.030635655150387634
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.70042194092827,
"acc_stderr": 0.0298180247497531,
"acc_norm": 0.70042194092827,
"acc_norm_stderr": 0.0298180247497531
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3650586701434159,
"acc_stderr": 0.012296373743443475,
"acc_norm": 0.3650586701434159,
"acc_norm_stderr": 0.012296373743443475
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6715686274509803,
"acc_stderr": 0.03296245110172229,
"acc_norm": 0.6715686274509803,
"acc_norm_stderr": 0.03296245110172229
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6909090909090909,
"acc_stderr": 0.036085410115739666,
"acc_norm": 0.6909090909090909,
"acc_norm_stderr": 0.036085410115739666
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2607099143206854,
"mc1_stderr": 0.015368841620766372,
"mc2": 0.4167656798454404,
"mc2_stderr": 0.015096503185058233
},
"harness|ko_commongen_v2|2": {
"acc": 0.48760330578512395,
"acc_stderr": 0.01718506973267653,
"acc_norm": 0.526564344746163,
"acc_norm_stderr": 0.017166075717577747
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "ENERGY-DRINK-LOVE/eeve_leaderboard_inst_v1.5",
"model_sha": "78505257163f6e45ed327b68f7bddb7c9ff509fd",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}