{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.34044368600682595,
"acc_stderr": 0.013847460518892981,
"acc_norm": 0.4249146757679181,
"acc_norm_stderr": 0.014445698968520769
},
"harness|ko_hellaswag|10": {
"acc": 0.4137621987651862,
"acc_stderr": 0.004915003499517832,
"acc_norm": 0.5510854411471818,
"acc_norm_stderr": 0.004963669199433383
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5029239766081871,
"acc_stderr": 0.03834759370936839,
"acc_norm": 0.5029239766081871,
"acc_norm_stderr": 0.03834759370936839
},
"harness|ko_mmlu_management|5": {
"acc": 0.47572815533980584,
"acc_stderr": 0.04944901092973781,
"acc_norm": 0.47572815533980584,
"acc_norm_stderr": 0.04944901092973781
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5593869731800766,
"acc_stderr": 0.01775339697390848,
"acc_norm": 0.5593869731800766,
"acc_norm_stderr": 0.01775339697390848
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4074074074074074,
"acc_stderr": 0.0424463323835323,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.0424463323835323
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3574468085106383,
"acc_stderr": 0.03132941789476425,
"acc_norm": 0.3574468085106383,
"acc_norm_stderr": 0.03132941789476425
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3373493975903614,
"acc_stderr": 0.03680783690727581,
"acc_norm": 0.3373493975903614,
"acc_norm_stderr": 0.03680783690727581
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4662379421221865,
"acc_stderr": 0.02833327710956281,
"acc_norm": 0.4662379421221865,
"acc_norm_stderr": 0.02833327710956281
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.48878923766816146,
"acc_stderr": 0.033549366530984746,
"acc_norm": 0.48878923766816146,
"acc_norm_stderr": 0.033549366530984746
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5190839694656488,
"acc_stderr": 0.04382094705550989,
"acc_norm": 0.5190839694656488,
"acc_norm_stderr": 0.04382094705550989
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.45,
"acc_stderr": 0.04999999999999999,
"acc_norm": 0.45,
"acc_norm_stderr": 0.04999999999999999
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5858585858585859,
"acc_stderr": 0.035094383488796295,
"acc_norm": 0.5858585858585859,
"acc_norm_stderr": 0.035094383488796295
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4068965517241379,
"acc_stderr": 0.04093793981266237,
"acc_norm": 0.4068965517241379,
"acc_norm_stderr": 0.04093793981266237
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.04158307533083286,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.04158307533083286
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.46218487394957986,
"acc_stderr": 0.032385469487589795,
"acc_norm": 0.46218487394957986,
"acc_norm_stderr": 0.032385469487589795
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.44871794871794873,
"acc_stderr": 0.025217315184846482,
"acc_norm": 0.44871794871794873,
"acc_norm_stderr": 0.025217315184846482
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956913,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956913
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5,
"acc_stderr": 0.04833682445228318,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04833682445228318
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3054187192118227,
"acc_stderr": 0.03240661565868408,
"acc_norm": 0.3054187192118227,
"acc_norm_stderr": 0.03240661565868408
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.44516129032258067,
"acc_stderr": 0.028272410186214906,
"acc_norm": 0.44516129032258067,
"acc_norm_stderr": 0.028272410186214906
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6538461538461539,
"acc_stderr": 0.0311669573672359,
"acc_norm": 0.6538461538461539,
"acc_norm_stderr": 0.0311669573672359
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.46037735849056605,
"acc_stderr": 0.030676096599389184,
"acc_norm": 0.46037735849056605,
"acc_norm_stderr": 0.030676096599389184
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5363636363636364,
"acc_stderr": 0.04776449162396197,
"acc_norm": 0.5363636363636364,
"acc_norm_stderr": 0.04776449162396197
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.1962962962962963,
"acc_stderr": 0.024217421327417155,
"acc_norm": 0.1962962962962963,
"acc_norm_stderr": 0.024217421327417155
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2582781456953642,
"acc_stderr": 0.035737053147634576,
"acc_norm": 0.2582781456953642,
"acc_norm_stderr": 0.035737053147634576
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.582089552238806,
"acc_stderr": 0.034875586404620636,
"acc_norm": 0.582089552238806,
"acc_norm_stderr": 0.034875586404620636
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.4508670520231214,
"acc_stderr": 0.0379401267469703,
"acc_norm": 0.4508670520231214,
"acc_norm_stderr": 0.0379401267469703
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.30158730158730157,
"acc_stderr": 0.0236369759961018,
"acc_norm": 0.30158730158730157,
"acc_norm_stderr": 0.0236369759961018
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3611111111111111,
"acc_stderr": 0.040166600304512336,
"acc_norm": 0.3611111111111111,
"acc_norm_stderr": 0.040166600304512336
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.6,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.6,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.43352601156069365,
"acc_stderr": 0.02668013476167922,
"acc_norm": 0.43352601156069365,
"acc_norm_stderr": 0.02668013476167922
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4662576687116564,
"acc_stderr": 0.039194155450484096,
"acc_norm": 0.4662576687116564,
"acc_norm_stderr": 0.039194155450484096
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4382716049382716,
"acc_stderr": 0.027607914087400473,
"acc_norm": 0.4382716049382716,
"acc_norm_stderr": 0.027607914087400473
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621505
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5544041450777202,
"acc_stderr": 0.03587014986075659,
"acc_norm": 0.5544041450777202,
"acc_norm_stderr": 0.03587014986075659
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.24561403508771928,
"acc_stderr": 0.04049339297748141,
"acc_norm": 0.24561403508771928,
"acc_norm_stderr": 0.04049339297748141
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5541284403669725,
"acc_stderr": 0.02131133500970858,
"acc_norm": 0.5541284403669725,
"acc_norm_stderr": 0.02131133500970858
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.36507936507936506,
"acc_stderr": 0.04306241259127155,
"acc_norm": 0.36507936507936506,
"acc_norm_stderr": 0.04306241259127155
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4477124183006536,
"acc_stderr": 0.028472938478033526,
"acc_norm": 0.4477124183006536,
"acc_norm_stderr": 0.028472938478033526
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5289256198347108,
"acc_stderr": 0.04556710331269498,
"acc_norm": 0.5289256198347108,
"acc_norm_stderr": 0.04556710331269498
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4473684210526316,
"acc_stderr": 0.04046336883978252,
"acc_norm": 0.4473684210526316,
"acc_norm_stderr": 0.04046336883978252
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3611111111111111,
"acc_stderr": 0.019431775677037313,
"acc_norm": 0.3611111111111111,
"acc_norm_stderr": 0.019431775677037313
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3475177304964539,
"acc_stderr": 0.028406627809590947,
"acc_norm": 0.3475177304964539,
"acc_norm_stderr": 0.028406627809590947
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.32142857142857145,
"acc_stderr": 0.04432804055291519,
"acc_norm": 0.32142857142857145,
"acc_norm_stderr": 0.04432804055291519
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3101851851851852,
"acc_stderr": 0.03154696285656629,
"acc_norm": 0.3101851851851852,
"acc_norm_stderr": 0.03154696285656629
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.29608938547486036,
"acc_stderr": 0.015268677317602286,
"acc_norm": 0.29608938547486036,
"acc_norm_stderr": 0.015268677317602286
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3860294117647059,
"acc_stderr": 0.029573269134411124,
"acc_norm": 0.3860294117647059,
"acc_norm_stderr": 0.029573269134411124
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.3469387755102041,
"acc_stderr": 0.0304725260267265,
"acc_norm": 0.3469387755102041,
"acc_norm_stderr": 0.0304725260267265
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5864978902953587,
"acc_stderr": 0.03205649904851858,
"acc_norm": 0.5864978902953587,
"acc_norm_stderr": 0.03205649904851858
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.31681877444589307,
"acc_stderr": 0.011882349954722997,
"acc_norm": 0.31681877444589307,
"acc_norm_stderr": 0.011882349954722997
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.03019028245350194,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.03019028245350194
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.18787878787878787,
"acc_stderr": 0.030501934059429144,
"acc_norm": 0.18787878787878787,
"acc_norm_stderr": 0.030501934059429144
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.29498164014687883,
"mc1_stderr": 0.015964400965589657,
"mc2": 0.4574707149506456,
"mc2_stderr": 0.015369860749341643
},
"harness|ko_commongen_v2|2": {
"acc": 0.5844155844155844,
"acc_stderr": 0.016943586313076575,
"acc_norm": 0.5997638724911453,
"acc_norm_stderr": 0.016844693510505052
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "KT-AI/midm-bitext-S-7B-inst-v1",
"model_sha": "88545caeab1463c83a15c23f5282cd8ea781dd0b",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}