{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.35409556313993173,
"acc_stderr": 0.013975454122756562,
"acc_norm": 0.4087030716723549,
"acc_norm_stderr": 0.014365750345427
},
"harness|ko_hellaswag|10": {
"acc": 0.3875721967735511,
"acc_stderr": 0.004862003566798545,
"acc_norm": 0.504282015534754,
"acc_norm_stderr": 0.004989598426249547
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4853801169590643,
"acc_stderr": 0.038331852752130205,
"acc_norm": 0.4853801169590643,
"acc_norm_stderr": 0.038331852752130205
},
"harness|ko_mmlu_management|5": {
"acc": 0.5533980582524272,
"acc_stderr": 0.04922424153458933,
"acc_norm": 0.5533980582524272,
"acc_norm_stderr": 0.04922424153458933
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.51213282247765,
"acc_stderr": 0.01787469866749133,
"acc_norm": 0.51213282247765,
"acc_norm_stderr": 0.01787469866749133
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.04292596718256981,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.04292596718256981
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421255,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421255
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.41702127659574467,
"acc_stderr": 0.03223276266711712,
"acc_norm": 0.41702127659574467,
"acc_norm_stderr": 0.03223276266711712
},
"harness|ko_mmlu_virology|5": {
"acc": 0.43373493975903615,
"acc_stderr": 0.03858158940685516,
"acc_norm": 0.43373493975903615,
"acc_norm_stderr": 0.03858158940685516
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.49517684887459806,
"acc_stderr": 0.028396770444111288,
"acc_norm": 0.49517684887459806,
"acc_norm_stderr": 0.028396770444111288
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.48878923766816146,
"acc_stderr": 0.033549366530984746,
"acc_norm": 0.48878923766816146,
"acc_norm_stderr": 0.033549366530984746
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.4122137404580153,
"acc_stderr": 0.04317171194870254,
"acc_norm": 0.4122137404580153,
"acc_norm_stderr": 0.04317171194870254
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5454545454545454,
"acc_stderr": 0.03547601494006937,
"acc_norm": 0.5454545454545454,
"acc_norm_stderr": 0.03547601494006937
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4482758620689655,
"acc_stderr": 0.04144311810878152,
"acc_norm": 0.4482758620689655,
"acc_norm_stderr": 0.04144311810878152
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.23529411764705882,
"acc_stderr": 0.04220773659171452,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.04220773659171452
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.40756302521008403,
"acc_stderr": 0.03191863374478465,
"acc_norm": 0.40756302521008403,
"acc_norm_stderr": 0.03191863374478465
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4307692307692308,
"acc_stderr": 0.02510682066053975,
"acc_norm": 0.4307692307692308,
"acc_norm_stderr": 0.02510682066053975
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.66,
"acc_stderr": 0.04760952285695237,
"acc_norm": 0.66,
"acc_norm_stderr": 0.04760952285695237
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.4722222222222222,
"acc_stderr": 0.04826217294139894,
"acc_norm": 0.4722222222222222,
"acc_norm_stderr": 0.04826217294139894
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.43349753694581283,
"acc_stderr": 0.03486731727419872,
"acc_norm": 0.43349753694581283,
"acc_norm_stderr": 0.03486731727419872
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.45806451612903226,
"acc_stderr": 0.02834378725054062,
"acc_norm": 0.45806451612903226,
"acc_norm_stderr": 0.02834378725054062
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6709401709401709,
"acc_stderr": 0.030782321577688173,
"acc_norm": 0.6709401709401709,
"acc_norm_stderr": 0.030782321577688173
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.41509433962264153,
"acc_stderr": 0.03032594578928611,
"acc_norm": 0.41509433962264153,
"acc_norm_stderr": 0.03032594578928611
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.43636363636363634,
"acc_stderr": 0.04750185058907297,
"acc_norm": 0.43636363636363634,
"acc_norm_stderr": 0.04750185058907297
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.02696242432507382,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.02696242432507382
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.24503311258278146,
"acc_stderr": 0.03511807571804723,
"acc_norm": 0.24503311258278146,
"acc_norm_stderr": 0.03511807571804723
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5870646766169154,
"acc_stderr": 0.03481520803367348,
"acc_norm": 0.5870646766169154,
"acc_norm_stderr": 0.03481520803367348
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3930635838150289,
"acc_stderr": 0.03724249595817729,
"acc_norm": 0.3930635838150289,
"acc_norm_stderr": 0.03724249595817729
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.35714285714285715,
"acc_stderr": 0.024677862841332786,
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.024677862841332786
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.375,
"acc_stderr": 0.04048439222695598,
"acc_norm": 0.375,
"acc_norm_stderr": 0.04048439222695598
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.57,
"acc_stderr": 0.04975698519562427,
"acc_norm": 0.57,
"acc_norm_stderr": 0.04975698519562427
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.47398843930635837,
"acc_stderr": 0.026882643434022895,
"acc_norm": 0.47398843930635837,
"acc_norm_stderr": 0.026882643434022895
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.43558282208588955,
"acc_stderr": 0.03895632464138937,
"acc_norm": 0.43558282208588955,
"acc_norm_stderr": 0.03895632464138937
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4567901234567901,
"acc_stderr": 0.027716661650194038,
"acc_norm": 0.4567901234567901,
"acc_norm_stderr": 0.027716661650194038
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5233160621761658,
"acc_stderr": 0.03604513672442202,
"acc_norm": 0.5233160621761658,
"acc_norm_stderr": 0.03604513672442202
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.044346007015849245,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.044346007015849245
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.48256880733944957,
"acc_stderr": 0.02142429187185315,
"acc_norm": 0.48256880733944957,
"acc_norm_stderr": 0.02142429187185315
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.23809523809523808,
"acc_stderr": 0.03809523809523811,
"acc_norm": 0.23809523809523808,
"acc_norm_stderr": 0.03809523809523811
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.434640522875817,
"acc_stderr": 0.028384256704883037,
"acc_norm": 0.434640522875817,
"acc_norm_stderr": 0.028384256704883037
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6528925619834711,
"acc_stderr": 0.04345724570292534,
"acc_norm": 0.6528925619834711,
"acc_norm_stderr": 0.04345724570292534
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3881578947368421,
"acc_stderr": 0.03965842097512744,
"acc_norm": 0.3881578947368421,
"acc_norm_stderr": 0.03965842097512744
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.37254901960784315,
"acc_stderr": 0.019559646809215934,
"acc_norm": 0.37254901960784315,
"acc_norm_stderr": 0.019559646809215934
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.33687943262411346,
"acc_stderr": 0.02819553487396673,
"acc_norm": 0.33687943262411346,
"acc_norm_stderr": 0.02819553487396673
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.32142857142857145,
"acc_stderr": 0.04432804055291518,
"acc_norm": 0.32142857142857145,
"acc_norm_stderr": 0.04432804055291518
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.375,
"acc_stderr": 0.033016908987210894,
"acc_norm": 0.375,
"acc_norm_stderr": 0.033016908987210894
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.24134078212290502,
"acc_stderr": 0.014310999547961443,
"acc_norm": 0.24134078212290502,
"acc_norm_stderr": 0.014310999547961443
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.6,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.6,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3382352941176471,
"acc_stderr": 0.028739328513983576,
"acc_norm": 0.3382352941176471,
"acc_norm_stderr": 0.028739328513983576
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.39183673469387753,
"acc_stderr": 0.03125127591089165,
"acc_norm": 0.39183673469387753,
"acc_norm_stderr": 0.03125127591089165
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5780590717299579,
"acc_stderr": 0.032148146302403695,
"acc_norm": 0.5780590717299579,
"acc_norm_stderr": 0.032148146302403695
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.32985658409387225,
"acc_stderr": 0.012008129938540472,
"acc_norm": 0.32985658409387225,
"acc_norm_stderr": 0.012008129938540472
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4215686274509804,
"acc_stderr": 0.03465868196380758,
"acc_norm": 0.4215686274509804,
"acc_norm_stderr": 0.03465868196380758
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.4727272727272727,
"acc_stderr": 0.03898531605579419,
"acc_norm": 0.4727272727272727,
"acc_norm_stderr": 0.03898531605579419
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2802937576499388,
"mc1_stderr": 0.015723139524608742,
"mc2": 0.4447858809482175,
"mc2_stderr": 0.015211057250300537
},
"harness|ko_commongen_v2|2": {
"acc": 0.3860684769775679,
"acc_stderr": 0.016738130760321743,
"acc_norm": 0.5100354191263282,
"acc_norm_stderr": 0.017186891286894067
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "BM-K/mistral-7b-it-v1.0",
"model_sha": "f5bfb9dc4f4dd8b64d45c9a158e3982959b18035",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}