results/AIdenU/SOLAR-10.7b-ko-Y24_v1.0/result_2024-03-18 01:07:12.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.4197952218430034,
"acc_stderr": 0.01442218122630303,
"acc_norm": 0.48378839590443684,
"acc_norm_stderr": 0.014603708567414947
},
"harness|ko_hellaswag|10": {
"acc": 0.4303923521210914,
"acc_stderr": 0.0049411916073179105,
"acc_norm": 0.5866361282613025,
"acc_norm_stderr": 0.004914305798575694
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6608187134502924,
"acc_stderr": 0.03631053496488905,
"acc_norm": 0.6608187134502924,
"acc_norm_stderr": 0.03631053496488905
},
"harness|ko_mmlu_management|5": {
"acc": 0.6310679611650486,
"acc_stderr": 0.0477761518115674,
"acc_norm": 0.6310679611650486,
"acc_norm_stderr": 0.0477761518115674
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.6564495530012772,
"acc_stderr": 0.01698214563265247,
"acc_norm": 0.6564495530012772,
"acc_norm_stderr": 0.01698214563265247
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4148148148148148,
"acc_stderr": 0.04256193767901406,
"acc_norm": 0.4148148148148148,
"acc_norm_stderr": 0.04256193767901406
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.49361702127659574,
"acc_stderr": 0.032683358999363386,
"acc_norm": 0.49361702127659574,
"acc_norm_stderr": 0.032683358999363386
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4819277108433735,
"acc_stderr": 0.03889951252827216,
"acc_norm": 0.4819277108433735,
"acc_norm_stderr": 0.03889951252827216
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.6045016077170418,
"acc_stderr": 0.027770918531427838,
"acc_norm": 0.6045016077170418,
"acc_norm_stderr": 0.027770918531427838
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.547085201793722,
"acc_stderr": 0.03340867501923324,
"acc_norm": 0.547085201793722,
"acc_norm_stderr": 0.03340867501923324
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.6335877862595419,
"acc_stderr": 0.04225875451969638,
"acc_norm": 0.6335877862595419,
"acc_norm_stderr": 0.04225875451969638
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.47,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465919
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.7424242424242424,
"acc_stderr": 0.03115626951964684,
"acc_norm": 0.7424242424242424,
"acc_norm_stderr": 0.03115626951964684
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5310344827586206,
"acc_stderr": 0.04158632762097828,
"acc_norm": 0.5310344827586206,
"acc_norm_stderr": 0.04158632762097828
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3137254901960784,
"acc_stderr": 0.04617034827006716,
"acc_norm": 0.3137254901960784,
"acc_norm_stderr": 0.04617034827006716
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5840336134453782,
"acc_stderr": 0.0320165010073961,
"acc_norm": 0.5840336134453782,
"acc_norm_stderr": 0.0320165010073961
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5025641025641026,
"acc_stderr": 0.025350672979412184,
"acc_norm": 0.5025641025641026,
"acc_norm_stderr": 0.025350672979412184
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.6,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.6,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6296296296296297,
"acc_stderr": 0.04668408033024931,
"acc_norm": 0.6296296296296297,
"acc_norm_stderr": 0.04668408033024931
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.37438423645320196,
"acc_stderr": 0.03405155380561952,
"acc_norm": 0.37438423645320196,
"acc_norm_stderr": 0.03405155380561952
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.6225806451612903,
"acc_stderr": 0.027575960723278243,
"acc_norm": 0.6225806451612903,
"acc_norm_stderr": 0.027575960723278243
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.811965811965812,
"acc_stderr": 0.02559819368665225,
"acc_norm": 0.811965811965812,
"acc_norm_stderr": 0.02559819368665225
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5358490566037736,
"acc_stderr": 0.030693675018458003,
"acc_norm": 0.5358490566037736,
"acc_norm_stderr": 0.030693675018458003
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5909090909090909,
"acc_stderr": 0.04709306978661895,
"acc_norm": 0.5909090909090909,
"acc_norm_stderr": 0.04709306978661895
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.028742040903948485,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.028742040903948485
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.36423841059602646,
"acc_stderr": 0.03929111781242741,
"acc_norm": 0.36423841059602646,
"acc_norm_stderr": 0.03929111781242741
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6915422885572139,
"acc_stderr": 0.03265819588512697,
"acc_norm": 0.6915422885572139,
"acc_norm_stderr": 0.03265819588512697
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.5086705202312138,
"acc_stderr": 0.038118909889404126,
"acc_norm": 0.5086705202312138,
"acc_norm_stderr": 0.038118909889404126
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.36507936507936506,
"acc_stderr": 0.024796060602699947,
"acc_norm": 0.36507936507936506,
"acc_norm_stderr": 0.024796060602699947
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4722222222222222,
"acc_stderr": 0.04174752578923185,
"acc_norm": 0.4722222222222222,
"acc_norm_stderr": 0.04174752578923185
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.72,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542127
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.6040462427745664,
"acc_stderr": 0.02632981334194624,
"acc_norm": 0.6040462427745664,
"acc_norm_stderr": 0.02632981334194624
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5398773006134969,
"acc_stderr": 0.03915857291436972,
"acc_norm": 0.5398773006134969,
"acc_norm_stderr": 0.03915857291436972
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.6203703703703703,
"acc_stderr": 0.027002521034516478,
"acc_norm": 0.6203703703703703,
"acc_norm_stderr": 0.027002521034516478
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.7253886010362695,
"acc_stderr": 0.03221024508041154,
"acc_norm": 0.7253886010362695,
"acc_norm_stderr": 0.03221024508041154
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.43859649122807015,
"acc_stderr": 0.04668000738510455,
"acc_norm": 0.43859649122807015,
"acc_norm_stderr": 0.04668000738510455
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6385321100917432,
"acc_stderr": 0.02059808200993737,
"acc_norm": 0.6385321100917432,
"acc_norm_stderr": 0.02059808200993737
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3492063492063492,
"acc_stderr": 0.04263906892795133,
"acc_norm": 0.3492063492063492,
"acc_norm_stderr": 0.04263906892795133
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5359477124183006,
"acc_stderr": 0.028555827516528777,
"acc_norm": 0.5359477124183006,
"acc_norm_stderr": 0.028555827516528777
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.55,
"acc_stderr": 0.049999999999999996,
"acc_norm": 0.55,
"acc_norm_stderr": 0.049999999999999996
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.04065578140908705,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.04065578140908705
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5723684210526315,
"acc_stderr": 0.04026097083296564,
"acc_norm": 0.5723684210526315,
"acc_norm_stderr": 0.04026097083296564
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.477124183006536,
"acc_stderr": 0.020206653187884782,
"acc_norm": 0.477124183006536,
"acc_norm_stderr": 0.020206653187884782
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.40070921985815605,
"acc_stderr": 0.029233465745573083,
"acc_norm": 0.40070921985815605,
"acc_norm_stderr": 0.029233465745573083
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.38392857142857145,
"acc_stderr": 0.046161430750285455,
"acc_norm": 0.38392857142857145,
"acc_norm_stderr": 0.046161430750285455
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.5231481481481481,
"acc_stderr": 0.034063153607115065,
"acc_norm": 0.5231481481481481,
"acc_norm_stderr": 0.034063153607115065
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.19553072625698323,
"acc_stderr": 0.013264579220945106,
"acc_norm": 0.19553072625698323,
"acc_norm_stderr": 0.013264579220945106
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.68,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.68,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.5147058823529411,
"acc_stderr": 0.03035969707904612,
"acc_norm": 0.5147058823529411,
"acc_norm_stderr": 0.03035969707904612
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.6326530612244898,
"acc_stderr": 0.03086214492108757,
"acc_norm": 0.6326530612244898,
"acc_norm_stderr": 0.03086214492108757
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.7088607594936709,
"acc_stderr": 0.029571601065753374,
"acc_norm": 0.7088607594936709,
"acc_norm_stderr": 0.029571601065753374
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.4041720990873533,
"acc_stderr": 0.012533504046491367,
"acc_norm": 0.4041720990873533,
"acc_norm_stderr": 0.012533504046491367
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6421568627450981,
"acc_stderr": 0.03364487286088298,
"acc_norm": 0.6421568627450981,
"acc_norm_stderr": 0.03364487286088298
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6545454545454545,
"acc_stderr": 0.03713158067481914,
"acc_norm": 0.6545454545454545,
"acc_norm_stderr": 0.03713158067481914
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2533659730722154,
"mc1_stderr": 0.015225899340826837,
"mc2": 0.4116463111717996,
"mc2_stderr": 0.014902796745251457
},
"harness|ko_commongen_v2|2": {
"acc": 0.43565525383707204,
"acc_stderr": 0.017047415229476327,
"acc_norm": 0.4935064935064935,
"acc_norm_stderr": 0.01718890435907731
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "AIdenU/SOLAR-10.7b-ko-Y24_v1.0",
"model_sha": "12af074c34713e89135226b12f6f59d2036234c4",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}