{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.32593856655290104,
"acc_stderr": 0.013697432466693239,
"acc_norm": 0.3703071672354949,
"acc_norm_stderr": 0.01411129875167495
},
"harness|ko_hellaswag|10": {
"acc": 0.3575980880302729,
"acc_stderr": 0.004783133725599501,
"acc_norm": 0.45907189802828124,
"acc_norm_stderr": 0.0049730364538637176
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.3684210526315789,
"acc_stderr": 0.036996580176568775,
"acc_norm": 0.3684210526315789,
"acc_norm_stderr": 0.036996580176568775
},
"harness|ko_mmlu_management|5": {
"acc": 0.5339805825242718,
"acc_stderr": 0.04939291447273481,
"acc_norm": 0.5339805825242718,
"acc_norm_stderr": 0.04939291447273481
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.4074074074074074,
"acc_stderr": 0.017570705239256534,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.017570705239256534
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04072314811876837,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04072314811876837
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3617021276595745,
"acc_stderr": 0.0314108219759624,
"acc_norm": 0.3617021276595745,
"acc_norm_stderr": 0.0314108219759624
},
"harness|ko_mmlu_virology|5": {
"acc": 0.2891566265060241,
"acc_stderr": 0.03529486801511115,
"acc_norm": 0.2891566265060241,
"acc_norm_stderr": 0.03529486801511115
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.38263665594855306,
"acc_stderr": 0.027604689028581986,
"acc_norm": 0.38263665594855306,
"acc_norm_stderr": 0.027604689028581986
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.3991031390134529,
"acc_stderr": 0.03286745312567961,
"acc_norm": 0.3991031390134529,
"acc_norm_stderr": 0.03286745312567961
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.3893129770992366,
"acc_stderr": 0.04276486542814591,
"acc_norm": 0.3893129770992366,
"acc_norm_stderr": 0.04276486542814591
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.45454545454545453,
"acc_stderr": 0.03547601494006937,
"acc_norm": 0.45454545454545453,
"acc_norm_stderr": 0.03547601494006937
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4068965517241379,
"acc_stderr": 0.040937939812662374,
"acc_norm": 0.4068965517241379,
"acc_norm_stderr": 0.040937939812662374
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04690650298201943,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04690650298201943
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.46638655462184875,
"acc_stderr": 0.03240501447690071,
"acc_norm": 0.46638655462184875,
"acc_norm_stderr": 0.03240501447690071
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.39487179487179486,
"acc_stderr": 0.024784316942156367,
"acc_norm": 0.39487179487179486,
"acc_norm_stderr": 0.024784316942156367
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.48148148148148145,
"acc_stderr": 0.04830366024635331,
"acc_norm": 0.48148148148148145,
"acc_norm_stderr": 0.04830366024635331
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3497536945812808,
"acc_stderr": 0.033554009049695646,
"acc_norm": 0.3497536945812808,
"acc_norm_stderr": 0.033554009049695646
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.38387096774193546,
"acc_stderr": 0.02766618207553963,
"acc_norm": 0.38387096774193546,
"acc_norm_stderr": 0.02766618207553963
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.688034188034188,
"acc_stderr": 0.03035152732334494,
"acc_norm": 0.688034188034188,
"acc_norm_stderr": 0.03035152732334494
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.39245283018867927,
"acc_stderr": 0.030052580579557838,
"acc_norm": 0.39245283018867927,
"acc_norm_stderr": 0.030052580579557838
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.4818181818181818,
"acc_stderr": 0.04785964010794916,
"acc_norm": 0.4818181818181818,
"acc_norm_stderr": 0.04785964010794916
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.32222222222222224,
"acc_stderr": 0.028493465091028593,
"acc_norm": 0.32222222222222224,
"acc_norm_stderr": 0.028493465091028593
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31125827814569534,
"acc_stderr": 0.03780445850526732,
"acc_norm": 0.31125827814569534,
"acc_norm_stderr": 0.03780445850526732
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5074626865671642,
"acc_stderr": 0.03535140084276719,
"acc_norm": 0.5074626865671642,
"acc_norm_stderr": 0.03535140084276719
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3872832369942196,
"acc_stderr": 0.03714325906302065,
"acc_norm": 0.3872832369942196,
"acc_norm_stderr": 0.03714325906302065
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.36507936507936506,
"acc_stderr": 0.02479606060269994,
"acc_norm": 0.36507936507936506,
"acc_norm_stderr": 0.02479606060269994
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3402777777777778,
"acc_stderr": 0.03962135573486219,
"acc_norm": 0.3402777777777778,
"acc_norm_stderr": 0.03962135573486219
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.43641618497109824,
"acc_stderr": 0.026700545424943677,
"acc_norm": 0.43641618497109824,
"acc_norm_stderr": 0.026700545424943677
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4110429447852761,
"acc_stderr": 0.038656978537853624,
"acc_norm": 0.4110429447852761,
"acc_norm_stderr": 0.038656978537853624
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.3765432098765432,
"acc_stderr": 0.026959344518747784,
"acc_norm": 0.3765432098765432,
"acc_norm_stderr": 0.026959344518747784
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.47668393782383417,
"acc_stderr": 0.03604513672442205,
"acc_norm": 0.47668393782383417,
"acc_norm_stderr": 0.03604513672442205
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2807017543859649,
"acc_stderr": 0.04227054451232199,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.04227054451232199
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.46422018348623856,
"acc_stderr": 0.021382364775701906,
"acc_norm": 0.46422018348623856,
"acc_norm_stderr": 0.021382364775701906
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3253968253968254,
"acc_stderr": 0.041905964388711366,
"acc_norm": 0.3253968253968254,
"acc_norm_stderr": 0.041905964388711366
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.40522875816993464,
"acc_stderr": 0.028110928492809082,
"acc_norm": 0.40522875816993464,
"acc_norm_stderr": 0.028110928492809082
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5702479338842975,
"acc_stderr": 0.04519082021319772,
"acc_norm": 0.5702479338842975,
"acc_norm_stderr": 0.04519082021319772
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3223684210526316,
"acc_stderr": 0.03803510248351585,
"acc_norm": 0.3223684210526316,
"acc_norm_stderr": 0.03803510248351585
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3366013071895425,
"acc_stderr": 0.019117213911495165,
"acc_norm": 0.3366013071895425,
"acc_norm_stderr": 0.019117213911495165
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.33687943262411346,
"acc_stderr": 0.02819553487396673,
"acc_norm": 0.33687943262411346,
"acc_norm_stderr": 0.02819553487396673
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.4017857142857143,
"acc_stderr": 0.04653333146973646,
"acc_norm": 0.4017857142857143,
"acc_norm_stderr": 0.04653333146973646
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.33796296296296297,
"acc_stderr": 0.03225941352631295,
"acc_norm": 0.33796296296296297,
"acc_norm_stderr": 0.03225941352631295
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.24581005586592178,
"acc_stderr": 0.014400296429225601,
"acc_norm": 0.24581005586592178,
"acc_norm_stderr": 0.014400296429225601
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.4,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.43,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.43,
"acc_norm_stderr": 0.04975698519562428
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3088235294117647,
"acc_stderr": 0.02806499816704009,
"acc_norm": 0.3088235294117647,
"acc_norm_stderr": 0.02806499816704009
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.4326530612244898,
"acc_stderr": 0.03171752824062664,
"acc_norm": 0.4326530612244898,
"acc_norm_stderr": 0.03171752824062664
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.569620253164557,
"acc_stderr": 0.03223017195937599,
"acc_norm": 0.569620253164557,
"acc_norm_stderr": 0.03223017195937599
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.31681877444589307,
"acc_stderr": 0.011882349954723,
"acc_norm": 0.31681877444589307,
"acc_norm_stderr": 0.011882349954723
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.39705882352941174,
"acc_stderr": 0.03434131164719129,
"acc_norm": 0.39705882352941174,
"acc_norm_stderr": 0.03434131164719129
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.45454545454545453,
"acc_stderr": 0.038881769216741004,
"acc_norm": 0.45454545454545453,
"acc_norm_stderr": 0.038881769216741004
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2974296205630355,
"mc1_stderr": 0.016002651487361005,
"mc2": 0.47967789733728444,
"mc2_stderr": 0.015628805381493933
},
"harness|ko_commongen_v2|2": {
"acc": 0.3955135773317591,
"acc_stderr": 0.016810815902206042,
"acc_norm": 0.4462809917355372,
"acc_norm_stderr": 0.017090852631668336
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "DopeorNope/Ko-Mixtral-MoE-7Bx2",
"model_sha": "af30206f35cca42c24b11722c944cfea46e42208",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}