results/mssma/ko-solar-10.7b-v0.1/result_2024-04-15 05:50:27.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.20563139931740615,
"acc_stderr": 0.011810745260742578,
"acc_norm": 0.24914675767918087,
"acc_norm_stderr": 0.012639407111926442
},
"harness|ko_hellaswag|10": {
"acc": 0.253734315873332,
"acc_stderr": 0.0043425802776627205,
"acc_norm": 0.2551284604660426,
"acc_norm_stderr": 0.0043504247506462035
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.23391812865497075,
"acc_stderr": 0.03246721765117827,
"acc_norm": 0.23391812865497075,
"acc_norm_stderr": 0.03246721765117827
},
"harness|ko_mmlu_management|5": {
"acc": 0.2524271844660194,
"acc_stderr": 0.04301250399690877,
"acc_norm": 0.2524271844660194,
"acc_norm_stderr": 0.04301250399690877
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.26181353767560667,
"acc_stderr": 0.015720838678445252,
"acc_norm": 0.26181353767560667,
"acc_norm_stderr": 0.015720838678445252
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.23703703703703705,
"acc_stderr": 0.03673731683969506,
"acc_norm": 0.23703703703703705,
"acc_norm_stderr": 0.03673731683969506
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768077,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768077
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.32340425531914896,
"acc_stderr": 0.03057944277361034,
"acc_norm": 0.32340425531914896,
"acc_norm_stderr": 0.03057944277361034
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3192771084337349,
"acc_stderr": 0.03629335329947859,
"acc_norm": 0.3192771084337349,
"acc_norm_stderr": 0.03629335329947859
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.27009646302250806,
"acc_stderr": 0.025218040373410605,
"acc_norm": 0.27009646302250806,
"acc_norm_stderr": 0.025218040373410605
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.37668161434977576,
"acc_stderr": 0.03252113489929189,
"acc_norm": 0.37668161434977576,
"acc_norm_stderr": 0.03252113489929189
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.22900763358778625,
"acc_stderr": 0.036853466317118506,
"acc_norm": 0.22900763358778625,
"acc_norm_stderr": 0.036853466317118506
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932268,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932268
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.21717171717171718,
"acc_stderr": 0.029376616484945627,
"acc_norm": 0.21717171717171718,
"acc_norm_stderr": 0.029376616484945627
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.21379310344827587,
"acc_stderr": 0.03416520447747549,
"acc_norm": 0.21379310344827587,
"acc_norm_stderr": 0.03416520447747549
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.19607843137254902,
"acc_stderr": 0.03950581861179962,
"acc_norm": 0.19607843137254902,
"acc_norm_stderr": 0.03950581861179962
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.23109243697478993,
"acc_stderr": 0.027381406927868973,
"acc_norm": 0.23109243697478993,
"acc_norm_stderr": 0.027381406927868973
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.21794871794871795,
"acc_stderr": 0.020932445774463168,
"acc_norm": 0.21794871794871795,
"acc_norm_stderr": 0.020932445774463168
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.04330043749650742,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.04330043749650742
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.26108374384236455,
"acc_stderr": 0.03090379695211447,
"acc_norm": 0.26108374384236455,
"acc_norm_stderr": 0.03090379695211447
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.23870967741935484,
"acc_stderr": 0.024251071262208837,
"acc_norm": 0.23870967741935484,
"acc_norm_stderr": 0.024251071262208837
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.24786324786324787,
"acc_stderr": 0.028286324075564404,
"acc_norm": 0.24786324786324787,
"acc_norm_stderr": 0.028286324075564404
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.26037735849056604,
"acc_stderr": 0.0270087660907081,
"acc_norm": 0.26037735849056604,
"acc_norm_stderr": 0.0270087660907081
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.3181818181818182,
"acc_stderr": 0.04461272175910508,
"acc_norm": 0.3181818181818182,
"acc_norm_stderr": 0.04461272175910508
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.02671924078371216,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.02671924078371216
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.1986754966887417,
"acc_stderr": 0.03257847384436776,
"acc_norm": 0.1986754966887417,
"acc_norm_stderr": 0.03257847384436776
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.22388059701492538,
"acc_stderr": 0.029475250236017197,
"acc_norm": 0.22388059701492538,
"acc_norm_stderr": 0.029475250236017197
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.23121387283236994,
"acc_stderr": 0.03214737302029471,
"acc_norm": 0.23121387283236994,
"acc_norm_stderr": 0.03214737302029471
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.25396825396825395,
"acc_stderr": 0.022418042891113946,
"acc_norm": 0.25396825396825395,
"acc_norm_stderr": 0.022418042891113946
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.03476590104304134,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.03476590104304134
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.24566473988439305,
"acc_stderr": 0.02317629820399201,
"acc_norm": 0.24566473988439305,
"acc_norm_stderr": 0.02317629820399201
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.24539877300613497,
"acc_stderr": 0.03380939813943353,
"acc_norm": 0.24539877300613497,
"acc_norm_stderr": 0.03380939813943353
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.27469135802469136,
"acc_stderr": 0.024836057868294674,
"acc_norm": 0.27469135802469136,
"acc_norm_stderr": 0.024836057868294674
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816506
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.18652849740932642,
"acc_stderr": 0.02811209121011747,
"acc_norm": 0.18652849740932642,
"acc_norm_stderr": 0.02811209121011747
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2807017543859649,
"acc_stderr": 0.042270544512321984,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.042270544512321984
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.23669724770642203,
"acc_stderr": 0.018224078117299095,
"acc_norm": 0.23669724770642203,
"acc_norm_stderr": 0.018224078117299095
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.20634920634920634,
"acc_stderr": 0.0361960452412425,
"acc_norm": 0.20634920634920634,
"acc_norm_stderr": 0.0361960452412425
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.21895424836601307,
"acc_stderr": 0.02367908986180772,
"acc_norm": 0.21895424836601307,
"acc_norm_stderr": 0.02367908986180772
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.24793388429752067,
"acc_stderr": 0.039418975265163025,
"acc_norm": 0.24793388429752067,
"acc_norm_stderr": 0.039418975265163025
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.18421052631578946,
"acc_stderr": 0.031546980450822305,
"acc_norm": 0.18421052631578946,
"acc_norm_stderr": 0.031546980450822305
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.25,
"acc_stderr": 0.01751781884501444,
"acc_norm": 0.25,
"acc_norm_stderr": 0.01751781884501444
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.24468085106382978,
"acc_stderr": 0.025645553622266733,
"acc_norm": 0.24468085106382978,
"acc_norm_stderr": 0.025645553622266733
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.24107142857142858,
"acc_stderr": 0.04059867246952687,
"acc_norm": 0.24107142857142858,
"acc_norm_stderr": 0.04059867246952687
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.16203703703703703,
"acc_stderr": 0.025130453652268455,
"acc_norm": 0.16203703703703703,
"acc_norm_stderr": 0.025130453652268455
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.15,
"acc_stderr": 0.0358870281282637,
"acc_norm": 0.15,
"acc_norm_stderr": 0.0358870281282637
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.1948529411764706,
"acc_stderr": 0.024060599423487417,
"acc_norm": 0.1948529411764706,
"acc_norm_stderr": 0.024060599423487417
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.17142857142857143,
"acc_stderr": 0.024127463462650146,
"acc_norm": 0.17142857142857143,
"acc_norm_stderr": 0.024127463462650146
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.25738396624472576,
"acc_stderr": 0.0284588209914603,
"acc_norm": 0.25738396624472576,
"acc_norm_stderr": 0.0284588209914603
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2405475880052151,
"acc_stderr": 0.010916406735478947,
"acc_norm": 0.2405475880052151,
"acc_norm_stderr": 0.010916406735478947
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.23039215686274508,
"acc_stderr": 0.02955429260569506,
"acc_norm": 0.23039215686274508,
"acc_norm_stderr": 0.02955429260569506
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.24242424242424243,
"acc_stderr": 0.033464098810559534,
"acc_norm": 0.24242424242424243,
"acc_norm_stderr": 0.033464098810559534
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.24357405140758873,
"mc1_stderr": 0.015026354824910782,
"mc2": NaN,
"mc2_stderr": NaN
},
"harness|ko_commongen_v2|2": {
"acc": 0.10153482880755609,
"acc_stderr": 0.010384198041619998,
"acc_norm": 0.36835891381345925,
"acc_norm_stderr": 0.01658385898263907
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "mssma/ko-solar-10.7b-v0.1",
"model_sha": "177bd341688cec645bd92b126c309d0a684f3a4f",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}