{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3293515358361775,
"acc_stderr": 0.013734057652635474,
"acc_norm": 0.386518771331058,
"acc_norm_stderr": 0.014230084761910474
},
"harness|ko_hellaswag|10": {
"acc": 0.3835889265086636,
"acc_stderr": 0.00485265887677539,
"acc_norm": 0.5022903804023103,
"acc_norm_stderr": 0.004989729059957435
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.391812865497076,
"acc_stderr": 0.03743979825926401,
"acc_norm": 0.391812865497076,
"acc_norm_stderr": 0.03743979825926401
},
"harness|ko_mmlu_management|5": {
"acc": 0.2815533980582524,
"acc_stderr": 0.04453254836326466,
"acc_norm": 0.2815533980582524,
"acc_norm_stderr": 0.04453254836326466
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.367816091954023,
"acc_stderr": 0.01724382889184626,
"acc_norm": 0.367816091954023,
"acc_norm_stderr": 0.01724382889184626
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.04094376269996795,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.04094376269996795
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2851063829787234,
"acc_stderr": 0.02951319662553935,
"acc_norm": 0.2851063829787234,
"acc_norm_stderr": 0.02951319662553935
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3132530120481928,
"acc_stderr": 0.03610805018031024,
"acc_norm": 0.3132530120481928,
"acc_norm_stderr": 0.03610805018031024
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.3858520900321543,
"acc_stderr": 0.027648149599751464,
"acc_norm": 0.3858520900321543,
"acc_norm_stderr": 0.027648149599751464
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.43946188340807174,
"acc_stderr": 0.03331092511038179,
"acc_norm": 0.43946188340807174,
"acc_norm_stderr": 0.03331092511038179
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.4351145038167939,
"acc_stderr": 0.04348208051644858,
"acc_norm": 0.4351145038167939,
"acc_norm_stderr": 0.04348208051644858
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.31313131313131315,
"acc_stderr": 0.03304205087813653,
"acc_norm": 0.31313131313131315,
"acc_norm_stderr": 0.03304205087813653
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3586206896551724,
"acc_stderr": 0.039966295748767186,
"acc_norm": 0.3586206896551724,
"acc_norm_stderr": 0.039966295748767186
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.20588235294117646,
"acc_stderr": 0.04023382273617747,
"acc_norm": 0.20588235294117646,
"acc_norm_stderr": 0.04023382273617747
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3697478991596639,
"acc_stderr": 0.031357095996135904,
"acc_norm": 0.3697478991596639,
"acc_norm_stderr": 0.031357095996135904
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2794871794871795,
"acc_stderr": 0.022752388839776823,
"acc_norm": 0.2794871794871795,
"acc_norm_stderr": 0.022752388839776823
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036846,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036846
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.37962962962962965,
"acc_stderr": 0.04691521224077742,
"acc_norm": 0.37962962962962965,
"acc_norm_stderr": 0.04691521224077742
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.22660098522167488,
"acc_stderr": 0.02945486383529298,
"acc_norm": 0.22660098522167488,
"acc_norm_stderr": 0.02945486383529298
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3225806451612903,
"acc_stderr": 0.026593084516572267,
"acc_norm": 0.3225806451612903,
"acc_norm_stderr": 0.026593084516572267
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.5,
"acc_stderr": 0.03275608910402091,
"acc_norm": 0.5,
"acc_norm_stderr": 0.03275608910402091
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.3471698113207547,
"acc_stderr": 0.029300101705549652,
"acc_norm": 0.3471698113207547,
"acc_norm_stderr": 0.029300101705549652
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.36363636363636365,
"acc_stderr": 0.04607582090719976,
"acc_norm": 0.36363636363636365,
"acc_norm_stderr": 0.04607582090719976
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.23703703703703705,
"acc_stderr": 0.025928876132766118,
"acc_norm": 0.23703703703703705,
"acc_norm_stderr": 0.025928876132766118
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2119205298013245,
"acc_stderr": 0.03336767086567977,
"acc_norm": 0.2119205298013245,
"acc_norm_stderr": 0.03336767086567977
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.39800995024875624,
"acc_stderr": 0.034611994290400135,
"acc_norm": 0.39800995024875624,
"acc_norm_stderr": 0.034611994290400135
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2774566473988439,
"acc_stderr": 0.03414014007044036,
"acc_norm": 0.2774566473988439,
"acc_norm_stderr": 0.03414014007044036
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.24867724867724866,
"acc_stderr": 0.022261817692400175,
"acc_norm": 0.24867724867724866,
"acc_norm_stderr": 0.022261817692400175
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2916666666666667,
"acc_stderr": 0.03800968060554859,
"acc_norm": 0.2916666666666667,
"acc_norm_stderr": 0.03800968060554859
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.38439306358381503,
"acc_stderr": 0.026189666966272035,
"acc_norm": 0.38439306358381503,
"acc_norm_stderr": 0.026189666966272035
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3496932515337423,
"acc_stderr": 0.03746668325470022,
"acc_norm": 0.3496932515337423,
"acc_norm_stderr": 0.03746668325470022
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.3611111111111111,
"acc_stderr": 0.026725868809100793,
"acc_norm": 0.3611111111111111,
"acc_norm_stderr": 0.026725868809100793
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.35751295336787564,
"acc_stderr": 0.03458816042181005,
"acc_norm": 0.35751295336787564,
"acc_norm_stderr": 0.03458816042181005
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.23684210526315788,
"acc_stderr": 0.03999423879281336,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.03999423879281336
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.3394495412844037,
"acc_stderr": 0.02030210934266235,
"acc_norm": 0.3394495412844037,
"acc_norm_stderr": 0.02030210934266235
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2619047619047619,
"acc_stderr": 0.039325376803928704,
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.039325376803928704
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.3660130718954248,
"acc_stderr": 0.027582811415159607,
"acc_norm": 0.3660130718954248,
"acc_norm_stderr": 0.027582811415159607
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.49586776859504134,
"acc_stderr": 0.04564198767432754,
"acc_norm": 0.49586776859504134,
"acc_norm_stderr": 0.04564198767432754
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.24342105263157895,
"acc_stderr": 0.034923496688842384,
"acc_norm": 0.24342105263157895,
"acc_norm_stderr": 0.034923496688842384
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3366013071895425,
"acc_stderr": 0.019117213911495165,
"acc_norm": 0.3366013071895425,
"acc_norm_stderr": 0.019117213911495165
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.29432624113475175,
"acc_stderr": 0.027187127011503796,
"acc_norm": 0.29432624113475175,
"acc_norm_stderr": 0.027187127011503796
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.29464285714285715,
"acc_stderr": 0.04327040932578728,
"acc_norm": 0.29464285714285715,
"acc_norm_stderr": 0.04327040932578728
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.2824074074074074,
"acc_stderr": 0.030701372111510927,
"acc_norm": 0.2824074074074074,
"acc_norm_stderr": 0.030701372111510927
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.25139664804469275,
"acc_stderr": 0.014508979453553977,
"acc_norm": 0.25139664804469275,
"acc_norm_stderr": 0.014508979453553977
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621505
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3786764705882353,
"acc_stderr": 0.029465133639776125,
"acc_norm": 0.3786764705882353,
"acc_norm_stderr": 0.029465133639776125
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.3510204081632653,
"acc_stderr": 0.03055531675557364,
"acc_norm": 0.3510204081632653,
"acc_norm_stderr": 0.03055531675557364
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.4767932489451477,
"acc_stderr": 0.032512152011410174,
"acc_norm": 0.4767932489451477,
"acc_norm_stderr": 0.032512152011410174
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3089960886571056,
"acc_stderr": 0.01180172977723925,
"acc_norm": 0.3089960886571056,
"acc_norm_stderr": 0.01180172977723925
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.31862745098039214,
"acc_stderr": 0.032702871814820816,
"acc_norm": 0.31862745098039214,
"acc_norm_stderr": 0.032702871814820816
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.3212121212121212,
"acc_stderr": 0.0364620496325381,
"acc_norm": 0.3212121212121212,
"acc_norm_stderr": 0.0364620496325381
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.25091799265605874,
"mc1_stderr": 0.01517698502770769,
"mc2": 0.38056097212603235,
"mc2_stderr": 0.014936929596682727
},
"harness|ko_commongen_v2|2": {
"acc": 0.21605667060212513,
"acc_stderr": 0.014149496716043137,
"acc_norm": 0.29279811097992914,
"acc_norm_stderr": 0.015644823205401337
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "42MARU/llama-2-ko-7b-instruction-v3",
"model_sha": "c0fea9cb31d4ae90aa2ed048f774a9000341b538",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}