{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.35580204778157,
"acc_stderr": 0.01399057113791876,
"acc_norm": 0.40102389078498296,
"acc_norm_stderr": 0.014322255790719865
},
"harness|ko_hellaswag|10": {
"acc": 0.40659231228838877,
"acc_stderr": 0.004901936511546131,
"acc_norm": 0.5401314479187412,
"acc_norm_stderr": 0.004973683026202176
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5146198830409356,
"acc_stderr": 0.038331852752130254,
"acc_norm": 0.5146198830409356,
"acc_norm_stderr": 0.038331852752130254
},
"harness|ko_mmlu_management|5": {
"acc": 0.5825242718446602,
"acc_stderr": 0.048828405482122375,
"acc_norm": 0.5825242718446602,
"acc_norm_stderr": 0.048828405482122375
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5402298850574713,
"acc_stderr": 0.017821994096933535,
"acc_norm": 0.5402298850574713,
"acc_norm_stderr": 0.017821994096933535
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4666666666666667,
"acc_stderr": 0.04309732901036354,
"acc_norm": 0.4666666666666667,
"acc_norm_stderr": 0.04309732901036354
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4425531914893617,
"acc_stderr": 0.03246956919789958,
"acc_norm": 0.4425531914893617,
"acc_norm_stderr": 0.03246956919789958
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3674698795180723,
"acc_stderr": 0.03753267402120574,
"acc_norm": 0.3674698795180723,
"acc_norm_stderr": 0.03753267402120574
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5048231511254019,
"acc_stderr": 0.028396770444111298,
"acc_norm": 0.5048231511254019,
"acc_norm_stderr": 0.028396770444111298
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5022421524663677,
"acc_stderr": 0.03355746535223263,
"acc_norm": 0.5022421524663677,
"acc_norm_stderr": 0.03355746535223263
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.4961832061068702,
"acc_stderr": 0.043851623256015534,
"acc_norm": 0.4961832061068702,
"acc_norm_stderr": 0.043851623256015534
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5707070707070707,
"acc_stderr": 0.03526552724601198,
"acc_norm": 0.5707070707070707,
"acc_norm_stderr": 0.03526552724601198
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5103448275862069,
"acc_stderr": 0.04165774775728763,
"acc_norm": 0.5103448275862069,
"acc_norm_stderr": 0.04165774775728763
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.04280105837364395,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.04280105837364395
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.46218487394957986,
"acc_stderr": 0.032385469487589795,
"acc_norm": 0.46218487394957986,
"acc_norm_stderr": 0.032385469487589795
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4641025641025641,
"acc_stderr": 0.025285585990017834,
"acc_norm": 0.4641025641025641,
"acc_norm_stderr": 0.025285585990017834
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.6,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.6,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.46296296296296297,
"acc_stderr": 0.04820403072760628,
"acc_norm": 0.46296296296296297,
"acc_norm_stderr": 0.04820403072760628
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3891625615763547,
"acc_stderr": 0.034304624161038716,
"acc_norm": 0.3891625615763547,
"acc_norm_stderr": 0.034304624161038716
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.46774193548387094,
"acc_stderr": 0.02838474778881333,
"acc_norm": 0.46774193548387094,
"acc_norm_stderr": 0.02838474778881333
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6965811965811965,
"acc_stderr": 0.030118210106942638,
"acc_norm": 0.6965811965811965,
"acc_norm_stderr": 0.030118210106942638
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4490566037735849,
"acc_stderr": 0.030612730713641092,
"acc_norm": 0.4490566037735849,
"acc_norm_stderr": 0.030612730713641092
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.41818181818181815,
"acc_stderr": 0.047245774057315705,
"acc_norm": 0.41818181818181815,
"acc_norm_stderr": 0.047245774057315705
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.337037037037037,
"acc_stderr": 0.028820884666253255,
"acc_norm": 0.337037037037037,
"acc_norm_stderr": 0.028820884666253255
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.038020397601079024,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.038020397601079024
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5422885572139303,
"acc_stderr": 0.035228658640995975,
"acc_norm": 0.5422885572139303,
"acc_norm_stderr": 0.035228658640995975
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3815028901734104,
"acc_stderr": 0.03703851193099521,
"acc_norm": 0.3815028901734104,
"acc_norm_stderr": 0.03703851193099521
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3148148148148148,
"acc_stderr": 0.023919984164047732,
"acc_norm": 0.3148148148148148,
"acc_norm_stderr": 0.023919984164047732
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4236111111111111,
"acc_stderr": 0.04132125019723368,
"acc_norm": 0.4236111111111111,
"acc_norm_stderr": 0.04132125019723368
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.49710982658959535,
"acc_stderr": 0.026918645383239004,
"acc_norm": 0.49710982658959535,
"acc_norm_stderr": 0.026918645383239004
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4539877300613497,
"acc_stderr": 0.0391170190467718,
"acc_norm": 0.4539877300613497,
"acc_norm_stderr": 0.0391170190467718
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5,
"acc_stderr": 0.02782074420373286,
"acc_norm": 0.5,
"acc_norm_stderr": 0.02782074420373286
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252606,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252606
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5647668393782384,
"acc_stderr": 0.03578038165008586,
"acc_norm": 0.5647668393782384,
"acc_norm_stderr": 0.03578038165008586
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2982456140350877,
"acc_stderr": 0.043036840335373173,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.043036840335373173
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6036697247706422,
"acc_stderr": 0.020971469947900525,
"acc_norm": 0.6036697247706422,
"acc_norm_stderr": 0.020971469947900525
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.040061680838488774,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.040061680838488774
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4673202614379085,
"acc_stderr": 0.02856869975222588,
"acc_norm": 0.4673202614379085,
"acc_norm_stderr": 0.02856869975222588
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5785123966942148,
"acc_stderr": 0.04507732278775086,
"acc_norm": 0.5785123966942148,
"acc_norm_stderr": 0.04507732278775086
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.46710526315789475,
"acc_stderr": 0.04060127035236395,
"acc_norm": 0.46710526315789475,
"acc_norm_stderr": 0.04060127035236395
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.4199346405228758,
"acc_stderr": 0.01996681117825648,
"acc_norm": 0.4199346405228758,
"acc_norm_stderr": 0.01996681117825648
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.02812163604063988,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.02812163604063988
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.33035714285714285,
"acc_stderr": 0.044642857142857116,
"acc_norm": 0.33035714285714285,
"acc_norm_stderr": 0.044642857142857116
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3472222222222222,
"acc_stderr": 0.032468872436376486,
"acc_norm": 0.3472222222222222,
"acc_norm_stderr": 0.032468872436376486
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.24804469273743016,
"acc_stderr": 0.01444415780826145,
"acc_norm": 0.24804469273743016,
"acc_norm_stderr": 0.01444415780826145
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.45,
"acc_stderr": 0.05,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.029896163033125474,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.029896163033125474
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.42448979591836733,
"acc_stderr": 0.031642094879429414,
"acc_norm": 0.42448979591836733,
"acc_norm_stderr": 0.031642094879429414
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6075949367088608,
"acc_stderr": 0.03178471874564729,
"acc_norm": 0.6075949367088608,
"acc_norm_stderr": 0.03178471874564729
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3246414602346806,
"acc_stderr": 0.011959089388530023,
"acc_norm": 0.3246414602346806,
"acc_norm_stderr": 0.011959089388530023
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5196078431372549,
"acc_stderr": 0.03506612560524866,
"acc_norm": 0.5196078431372549,
"acc_norm_stderr": 0.03506612560524866
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5636363636363636,
"acc_stderr": 0.03872592983524754,
"acc_norm": 0.5636363636363636,
"acc_norm_stderr": 0.03872592983524754
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.30354957160342716,
"mc1_stderr": 0.01609588415538684,
"mc2": 0.46287390999405587,
"mc2_stderr": 0.01568531535775204
},
"harness|ko_commongen_v2|2": {
"acc": 0.551357733175915,
"acc_stderr": 0.01709943051472577,
"acc_norm": 0.5796930342384888,
"acc_norm_stderr": 0.01697059828117771
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "GAI-LLM/OPEN-SOLAR-KO-10.7B-dpo-dedup",
"model_sha": "b2521334feca96155d3399a5a261f4eb80380512",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
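
A minimal sketch, assuming the JSON above is saved locally under a hypothetical filename, of how these per-task scores could be parsed and summarized with standard-library Python; the leaderboard's own aggregation may differ.

import json
from statistics import mean

# Hypothetical local path to the result file shown above.
PATH = "result_2024-01-31 09:23:30.json"

with open(PATH, encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Average normalized accuracy over the ko_mmlu subtasks.
mmlu_scores = [
    v["acc_norm"]
    for k, v in results.items()
    if k.startswith("harness|ko_mmlu")
]
print("ko_mmlu mean acc_norm:", round(mean(mmlu_scores), 4))

# Headline scores reported for the remaining tasks.
print("ko_arc_challenge acc_norm:", results["harness|ko_arc_challenge|25"]["acc_norm"])
print("ko_hellaswag acc_norm:", results["harness|ko_hellaswag|10"]["acc_norm"])
print("ko_truthfulqa mc2:", results["harness|ko_truthfulqa_mc|0"]["mc2"])
print("ko_commongen_v2 acc_norm:", results["harness|ko_commongen_v2|2"]["acc_norm"])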