results/GAI-LLM/KoSOLAR-10.7B-mixed-v13/result_2024-01-09 01:19:12.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3430034129692833,
"acc_stderr": 0.013872423223718169,
"acc_norm": 0.35494880546075086,
"acc_norm_stderr": 0.013983036904094095
},
"harness|ko_hellaswag|10": {
"acc": 0.317167894841665,
"acc_stderr": 0.004644223294727725,
"acc_norm": 0.37402907787293366,
"acc_norm_stderr": 0.0048288229209152295
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5730994152046783,
"acc_stderr": 0.03793620616529916,
"acc_norm": 0.5730994152046783,
"acc_norm_stderr": 0.03793620616529916
},
"harness|ko_mmlu_management|5": {
"acc": 0.6019417475728155,
"acc_stderr": 0.048467482539772386,
"acc_norm": 0.6019417475728155,
"acc_norm_stderr": 0.048467482539772386
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.6270753512132823,
"acc_stderr": 0.01729286826945393,
"acc_norm": 0.6270753512132823,
"acc_norm_stderr": 0.01729286826945393
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.45185185185185184,
"acc_stderr": 0.04299268905480863,
"acc_norm": 0.45185185185185184,
"acc_norm_stderr": 0.04299268905480863
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.5063829787234042,
"acc_stderr": 0.03268335899936336,
"acc_norm": 0.5063829787234042,
"acc_norm_stderr": 0.03268335899936336
},
"harness|ko_mmlu_virology|5": {
"acc": 0.42168674698795183,
"acc_stderr": 0.03844453181770917,
"acc_norm": 0.42168674698795183,
"acc_norm_stderr": 0.03844453181770917
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5337620578778135,
"acc_stderr": 0.028333277109562783,
"acc_norm": 0.5337620578778135,
"acc_norm_stderr": 0.028333277109562783
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5067264573991032,
"acc_stderr": 0.03355476596234354,
"acc_norm": 0.5067264573991032,
"acc_norm_stderr": 0.03355476596234354
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.648854961832061,
"acc_stderr": 0.04186445163013751,
"acc_norm": 0.648854961832061,
"acc_norm_stderr": 0.04186445163013751
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.45,
"acc_stderr": 0.04999999999999999,
"acc_norm": 0.45,
"acc_norm_stderr": 0.04999999999999999
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.03427308652999936,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.03427308652999936
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.47586206896551725,
"acc_stderr": 0.0416180850350153,
"acc_norm": 0.47586206896551725,
"acc_norm_stderr": 0.0416180850350153
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.04488482852329017,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.04488482852329017
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5630252100840336,
"acc_stderr": 0.03221943636566196,
"acc_norm": 0.5630252100840336,
"acc_norm_stderr": 0.03221943636566196
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5128205128205128,
"acc_stderr": 0.02534267129380724,
"acc_norm": 0.5128205128205128,
"acc_norm_stderr": 0.02534267129380724
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.55,
"acc_stderr": 0.04999999999999999,
"acc_norm": 0.55,
"acc_norm_stderr": 0.04999999999999999
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.04803752235190192,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.04803752235190192
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.41379310344827586,
"acc_stderr": 0.03465304488406796,
"acc_norm": 0.41379310344827586,
"acc_norm_stderr": 0.03465304488406796
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5774193548387097,
"acc_stderr": 0.02810096472427264,
"acc_norm": 0.5774193548387097,
"acc_norm_stderr": 0.02810096472427264
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7606837606837606,
"acc_stderr": 0.027951826808924333,
"acc_norm": 0.7606837606837606,
"acc_norm_stderr": 0.027951826808924333
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.47924528301886793,
"acc_stderr": 0.030746349975723463,
"acc_norm": 0.47924528301886793,
"acc_norm_stderr": 0.030746349975723463
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5545454545454546,
"acc_stderr": 0.047605488214603246,
"acc_norm": 0.5545454545454546,
"acc_norm_stderr": 0.047605488214603246
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3111111111111111,
"acc_stderr": 0.028226446749683515,
"acc_norm": 0.3111111111111111,
"acc_norm_stderr": 0.028226446749683515
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.33774834437086093,
"acc_stderr": 0.038615575462551684,
"acc_norm": 0.33774834437086093,
"acc_norm_stderr": 0.038615575462551684
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6517412935323383,
"acc_stderr": 0.033687874661154596,
"acc_norm": 0.6517412935323383,
"acc_norm_stderr": 0.033687874661154596
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.5028901734104047,
"acc_stderr": 0.038124005659748335,
"acc_norm": 0.5028901734104047,
"acc_norm_stderr": 0.038124005659748335
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.4021164021164021,
"acc_stderr": 0.025253032554997692,
"acc_norm": 0.4021164021164021,
"acc_norm_stderr": 0.025253032554997692
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4583333333333333,
"acc_stderr": 0.04166666666666665,
"acc_norm": 0.4583333333333333,
"acc_norm_stderr": 0.04166666666666665
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.66,
"acc_stderr": 0.04760952285695238,
"acc_norm": 0.66,
"acc_norm_stderr": 0.04760952285695238
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5375722543352601,
"acc_stderr": 0.026842985519615375,
"acc_norm": 0.5375722543352601,
"acc_norm_stderr": 0.026842985519615375
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4539877300613497,
"acc_stderr": 0.0391170190467718,
"acc_norm": 0.4539877300613497,
"acc_norm_stderr": 0.0391170190467718
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5833333333333334,
"acc_stderr": 0.02743162372241501,
"acc_norm": 0.5833333333333334,
"acc_norm_stderr": 0.02743162372241501
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.6580310880829016,
"acc_stderr": 0.03423465100104284,
"acc_norm": 0.6580310880829016,
"acc_norm_stderr": 0.03423465100104284
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.34210526315789475,
"acc_stderr": 0.04462917535336937,
"acc_norm": 0.34210526315789475,
"acc_norm_stderr": 0.04462917535336937
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6568807339449542,
"acc_stderr": 0.020354777736086037,
"acc_norm": 0.6568807339449542,
"acc_norm_stderr": 0.020354777736086037
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.04360314860077459,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.04360314860077459
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5424836601307189,
"acc_stderr": 0.028526383452142638,
"acc_norm": 0.5424836601307189,
"acc_norm_stderr": 0.028526383452142638
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.7107438016528925,
"acc_stderr": 0.041391127276354626,
"acc_norm": 0.7107438016528925,
"acc_norm_stderr": 0.041391127276354626
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5,
"acc_stderr": 0.04068942293855797,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04068942293855797
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.42483660130718953,
"acc_stderr": 0.01999797303545833,
"acc_norm": 0.42483660130718953,
"acc_norm_stderr": 0.01999797303545833
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.40070921985815605,
"acc_stderr": 0.02923346574557309,
"acc_norm": 0.40070921985815605,
"acc_norm_stderr": 0.02923346574557309
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.33035714285714285,
"acc_stderr": 0.04464285714285712,
"acc_norm": 0.33035714285714285,
"acc_norm_stderr": 0.04464285714285712
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4861111111111111,
"acc_stderr": 0.034086558679777494,
"acc_norm": 0.4861111111111111,
"acc_norm_stderr": 0.034086558679777494
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.24134078212290502,
"acc_stderr": 0.014310999547961454,
"acc_norm": 0.24134078212290502,
"acc_norm_stderr": 0.014310999547961454
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.45,
"acc_stderr": 0.04999999999999999,
"acc_norm": 0.45,
"acc_norm_stderr": 0.04999999999999999
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.63,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.63,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.48161764705882354,
"acc_stderr": 0.030352303395351964,
"acc_norm": 0.48161764705882354,
"acc_norm_stderr": 0.030352303395351964
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5673469387755102,
"acc_stderr": 0.031717528240626645,
"acc_norm": 0.5673469387755102,
"acc_norm_stderr": 0.031717528240626645
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.7088607594936709,
"acc_stderr": 0.029571601065753374,
"acc_norm": 0.7088607594936709,
"acc_norm_stderr": 0.029571601065753374
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.36114732724902215,
"acc_stderr": 0.012267935477519046,
"acc_norm": 0.36114732724902215,
"acc_norm_stderr": 0.012267935477519046
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6029411764705882,
"acc_stderr": 0.034341311647191286,
"acc_norm": 0.6029411764705882,
"acc_norm_stderr": 0.034341311647191286
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6181818181818182,
"acc_stderr": 0.03793713171165633,
"acc_norm": 0.6181818181818182,
"acc_norm_stderr": 0.03793713171165633
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2252141982864137,
"mc1_stderr": 0.01462324076802348,
"mc2": 0.42495215761771127,
"mc2_stderr": 0.016033708445870685
},
"harness|ko_commongen_v2|2": {
"acc": 0.23376623376623376,
"acc_stderr": 0.014550782587103121,
"acc_norm": 0.2727272727272727,
"acc_norm_stderr": 0.015311853110300354
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "GAI-LLM/KoSOLAR-10.7B-mixed-v13",
"model_sha": "b71be1d2f6ce126c7cf7b2857fd2411be02368b5",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
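As a minimal sketch (not part of the results file itself), the snippet below shows one way this JSON could be loaded and summarized with Python's standard json module; the local filename is hypothetical and taken from the page header above. It averages acc_norm across the tasks that report it, skipping harness|ko_truthfulqa_mc|0, which reports mc1/mc2 instead.

import json

# Hypothetical local path to the results file shown above.
with open("result_2024-01-09 01:19:12.json", "r", encoding="utf-8") as f:
    data = json.load(f)

# Collect acc_norm from every task that reports it (truthfulqa uses mc1/mc2 and is skipped).
scores = [task["acc_norm"] for task in data["results"].values() if "acc_norm" in task]

print(f"Model: {data['config_general']['model_name']}")
print(f"Mean acc_norm over {len(scores)} tasks: {sum(scores) / len(scores):.4f}")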