results/JY623/KoSOLAR-v2.1/result_2024-03-28 06:32:50.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.7022184300341296,
"acc_stderr": 0.013363080107244485,
"acc_norm": 0.75,
"acc_norm_stderr": 0.012653835621466646
},
"harness|ko_hellaswag|10": {
"acc": 0.5191196972714599,
"acc_stderr": 0.004986131919673963,
"acc_norm": 0.6786496713802032,
"acc_norm_stderr": 0.004660405565338769
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.7192982456140351,
"acc_stderr": 0.034462962170884265,
"acc_norm": 0.7192982456140351,
"acc_norm_stderr": 0.034462962170884265
},
"harness|ko_mmlu_management|5": {
"acc": 0.7281553398058253,
"acc_stderr": 0.044052680241409216,
"acc_norm": 0.7281553398058253,
"acc_norm_stderr": 0.044052680241409216
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.7381864623243933,
"acc_stderr": 0.01572083867844526,
"acc_norm": 0.7381864623243933,
"acc_norm_stderr": 0.01572083867844526
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4666666666666667,
"acc_stderr": 0.04309732901036354,
"acc_norm": 0.4666666666666667,
"acc_norm_stderr": 0.04309732901036354
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.5404255319148936,
"acc_stderr": 0.03257901482099834,
"acc_norm": 0.5404255319148936,
"acc_norm_stderr": 0.03257901482099834
},
"harness|ko_mmlu_virology|5": {
"acc": 0.5180722891566265,
"acc_stderr": 0.03889951252827216,
"acc_norm": 0.5180722891566265,
"acc_norm_stderr": 0.03889951252827216
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.6495176848874598,
"acc_stderr": 0.027098652621301744,
"acc_norm": 0.6495176848874598,
"acc_norm_stderr": 0.027098652621301744
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.6681614349775785,
"acc_stderr": 0.031602951437766785,
"acc_norm": 0.6681614349775785,
"acc_norm_stderr": 0.031602951437766785
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.6335877862595419,
"acc_stderr": 0.042258754519696386,
"acc_norm": 0.6335877862595419,
"acc_norm_stderr": 0.042258754519696386
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.47,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465919
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.7727272727272727,
"acc_stderr": 0.029857515673386417,
"acc_norm": 0.7727272727272727,
"acc_norm_stderr": 0.029857515673386417
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5724137931034483,
"acc_stderr": 0.04122737111370332,
"acc_norm": 0.5724137931034483,
"acc_norm_stderr": 0.04122737111370332
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.35294117647058826,
"acc_stderr": 0.047551296160629475,
"acc_norm": 0.35294117647058826,
"acc_norm_stderr": 0.047551296160629475
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.6512605042016807,
"acc_stderr": 0.030956636328566548,
"acc_norm": 0.6512605042016807,
"acc_norm_stderr": 0.030956636328566548
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.6461538461538462,
"acc_stderr": 0.02424378399406214,
"acc_norm": 0.6461538461538462,
"acc_norm_stderr": 0.02424378399406214
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.04557239513497751,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.04557239513497751
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.46798029556650245,
"acc_stderr": 0.03510766597959217,
"acc_norm": 0.46798029556650245,
"acc_norm_stderr": 0.03510766597959217
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.6580645161290323,
"acc_stderr": 0.026985289576552732,
"acc_norm": 0.6580645161290323,
"acc_norm_stderr": 0.026985289576552732
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.8589743589743589,
"acc_stderr": 0.022801382534597528,
"acc_norm": 0.8589743589743589,
"acc_norm_stderr": 0.022801382534597528
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.6113207547169811,
"acc_stderr": 0.03000048544867599,
"acc_norm": 0.6113207547169811,
"acc_norm_stderr": 0.03000048544867599
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.4,
"acc_stderr": 0.029869605095316904,
"acc_norm": 0.4,
"acc_norm_stderr": 0.029869605095316904
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.3576158940397351,
"acc_stderr": 0.03913453431177258,
"acc_norm": 0.3576158940397351,
"acc_norm_stderr": 0.03913453431177258
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.7512437810945274,
"acc_stderr": 0.030567675938916707,
"acc_norm": 0.7512437810945274,
"acc_norm_stderr": 0.030567675938916707
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.5606936416184971,
"acc_stderr": 0.037842719328874674,
"acc_norm": 0.5606936416184971,
"acc_norm_stderr": 0.037842719328874674
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.46825396825396826,
"acc_stderr": 0.025699352832131792,
"acc_norm": 0.46825396825396826,
"acc_norm_stderr": 0.025699352832131792
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.6319444444444444,
"acc_stderr": 0.04032999053960719,
"acc_norm": 0.6319444444444444,
"acc_norm_stderr": 0.04032999053960719
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.630057803468208,
"acc_stderr": 0.02599247202930639,
"acc_norm": 0.630057803468208,
"acc_norm_stderr": 0.02599247202930639
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.656441717791411,
"acc_stderr": 0.03731133519673893,
"acc_norm": 0.656441717791411,
"acc_norm_stderr": 0.03731133519673893
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.7067901234567902,
"acc_stderr": 0.02532988817190092,
"acc_norm": 0.7067901234567902,
"acc_norm_stderr": 0.02532988817190092
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.7668393782383419,
"acc_stderr": 0.030516111371476008,
"acc_norm": 0.7668393782383419,
"acc_norm_stderr": 0.030516111371476008
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.49122807017543857,
"acc_stderr": 0.04702880432049615,
"acc_norm": 0.49122807017543857,
"acc_norm_stderr": 0.04702880432049615
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.7926605504587156,
"acc_stderr": 0.01738141556360866,
"acc_norm": 0.7926605504587156,
"acc_norm_stderr": 0.01738141556360866
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.42063492063492064,
"acc_stderr": 0.04415438226743744,
"acc_norm": 0.42063492063492064,
"acc_norm_stderr": 0.04415438226743744
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.6601307189542484,
"acc_stderr": 0.027121956071388852,
"acc_norm": 0.6601307189542484,
"acc_norm_stderr": 0.027121956071388852
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.72,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542127
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.7603305785123967,
"acc_stderr": 0.03896878985070416,
"acc_norm": 0.7603305785123967,
"acc_norm_stderr": 0.03896878985070416
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.6842105263157895,
"acc_stderr": 0.0378272898086547,
"acc_norm": 0.6842105263157895,
"acc_norm_stderr": 0.0378272898086547
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.6078431372549019,
"acc_stderr": 0.01975172650876263,
"acc_norm": 0.6078431372549019,
"acc_norm_stderr": 0.01975172650876263
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.44680851063829785,
"acc_stderr": 0.029658235097666907,
"acc_norm": 0.44680851063829785,
"acc_norm_stderr": 0.029658235097666907
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.4375,
"acc_stderr": 0.04708567521880525,
"acc_norm": 0.4375,
"acc_norm_stderr": 0.04708567521880525
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.5138888888888888,
"acc_stderr": 0.03408655867977747,
"acc_norm": 0.5138888888888888,
"acc_norm_stderr": 0.03408655867977747
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.4692737430167598,
"acc_stderr": 0.016690896161944385,
"acc_norm": 0.4692737430167598,
"acc_norm_stderr": 0.016690896161944385
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.5808823529411765,
"acc_stderr": 0.02997280717046462,
"acc_norm": 0.5808823529411765,
"acc_norm_stderr": 0.02997280717046462
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.710204081632653,
"acc_stderr": 0.029043088683304345,
"acc_norm": 0.710204081632653,
"acc_norm_stderr": 0.029043088683304345
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.7679324894514767,
"acc_stderr": 0.027479744550808503,
"acc_norm": 0.7679324894514767,
"acc_norm_stderr": 0.027479744550808503
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.4634941329856584,
"acc_stderr": 0.012736153390214965,
"acc_norm": 0.4634941329856584,
"acc_norm_stderr": 0.012736153390214965
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.7107843137254902,
"acc_stderr": 0.031822318676475544,
"acc_norm": 0.7107843137254902,
"acc_norm_stderr": 0.031822318676475544
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.036810508691615486,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.036810508691615486
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.6646266829865362,
"mc1_stderr": 0.016527534039668987,
"mc2": 0.7684588284159719,
"mc2_stderr": 0.013616670960296498
},
"harness|ko_commongen_v2|2": {
"acc": 0.5029515938606848,
"acc_stderr": 0.017190054580194694,
"acc_norm": 0.5218417945690673,
"acc_norm_stderr": 0.017173944474294385
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "JY623/KoSOLAR-v2.1",
"model_sha": "8edd3ffafb7d436b458d4a1c1224ef4d87ddb558",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}