{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.19027303754266212,
"acc_stderr": 0.011470424179225704,
"acc_norm": 0.2593856655290102,
"acc_norm_stderr": 0.012808273573927106
},
"harness|ko_hellaswag|10": {
"acc": 0.24805815574586737,
"acc_stderr": 0.0043100310444591575,
"acc_norm": 0.2484564827723561,
"acc_norm_stderr": 0.004312347492538338
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.03615507630310934,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03615507630310934
},
"harness|ko_mmlu_management|5": {
"acc": 0.20388349514563106,
"acc_stderr": 0.0398913985953177,
"acc_norm": 0.20388349514563106,
"acc_norm_stderr": 0.0398913985953177
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.2503192848020434,
"acc_stderr": 0.015491088951494597,
"acc_norm": 0.2503192848020434,
"acc_norm_stderr": 0.015491088951494597
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.18518518518518517,
"acc_stderr": 0.03355677216313143,
"acc_norm": 0.18518518518518517,
"acc_norm_stderr": 0.03355677216313143
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.28085106382978725,
"acc_stderr": 0.02937917046412482,
"acc_norm": 0.28085106382978725,
"acc_norm_stderr": 0.02937917046412482
},
"harness|ko_mmlu_virology|5": {
"acc": 0.29518072289156627,
"acc_stderr": 0.0355092018568963,
"acc_norm": 0.29518072289156627,
"acc_norm_stderr": 0.0355092018568963
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.1832797427652733,
"acc_stderr": 0.021974198848265812,
"acc_norm": 0.1832797427652733,
"acc_norm_stderr": 0.021974198848265812
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.30493273542600896,
"acc_stderr": 0.030898610882477518,
"acc_norm": 0.30493273542600896,
"acc_norm_stderr": 0.030898610882477518
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.24427480916030533,
"acc_stderr": 0.037683359597287434,
"acc_norm": 0.24427480916030533,
"acc_norm_stderr": 0.037683359597287434
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.21212121212121213,
"acc_stderr": 0.029126522834586825,
"acc_norm": 0.21212121212121213,
"acc_norm_stderr": 0.029126522834586825
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2206896551724138,
"acc_stderr": 0.03455930201924812,
"acc_norm": 0.2206896551724138,
"acc_norm_stderr": 0.03455930201924812
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.27450980392156865,
"acc_stderr": 0.044405219061793254,
"acc_norm": 0.27450980392156865,
"acc_norm_stderr": 0.044405219061793254
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.2184873949579832,
"acc_stderr": 0.02684151432295894,
"acc_norm": 0.2184873949579832,
"acc_norm_stderr": 0.02684151432295894
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2128205128205128,
"acc_stderr": 0.020752423722128006,
"acc_norm": 0.2128205128205128,
"acc_norm_stderr": 0.020752423722128006
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.28703703703703703,
"acc_stderr": 0.043733130409147614,
"acc_norm": 0.28703703703703703,
"acc_norm_stderr": 0.043733130409147614
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.20689655172413793,
"acc_stderr": 0.02850137816789395,
"acc_norm": 0.20689655172413793,
"acc_norm_stderr": 0.02850137816789395
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.2032258064516129,
"acc_stderr": 0.02289168798455495,
"acc_norm": 0.2032258064516129,
"acc_norm_stderr": 0.02289168798455495
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.32051282051282054,
"acc_stderr": 0.030572811310299604,
"acc_norm": 0.32051282051282054,
"acc_norm_stderr": 0.030572811310299604
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.2528301886792453,
"acc_stderr": 0.02674989977124123,
"acc_norm": 0.2528301886792453,
"acc_norm_stderr": 0.02674989977124123
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.2909090909090909,
"acc_stderr": 0.04350271442923243,
"acc_norm": 0.2909090909090909,
"acc_norm_stderr": 0.04350271442923243
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.22592592592592592,
"acc_stderr": 0.025497532639609556,
"acc_norm": 0.22592592592592592,
"acc_norm_stderr": 0.025497532639609556
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.19205298013245034,
"acc_stderr": 0.032162984205936156,
"acc_norm": 0.19205298013245034,
"acc_norm_stderr": 0.032162984205936156
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.25870646766169153,
"acc_stderr": 0.03096590312357304,
"acc_norm": 0.25870646766169153,
"acc_norm_stderr": 0.03096590312357304
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2254335260115607,
"acc_stderr": 0.03186209851641142,
"acc_norm": 0.2254335260115607,
"acc_norm_stderr": 0.03186209851641142
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.23809523809523808,
"acc_stderr": 0.021935878081184756,
"acc_norm": 0.23809523809523808,
"acc_norm_stderr": 0.021935878081184756
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.24305555555555555,
"acc_stderr": 0.03586879280080342,
"acc_norm": 0.24305555555555555,
"acc_norm_stderr": 0.03586879280080342
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.26,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.26,
"acc_norm_stderr": 0.044084400227680794
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.2514450867052023,
"acc_stderr": 0.02335736578587403,
"acc_norm": 0.2514450867052023,
"acc_norm_stderr": 0.02335736578587403
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.2392638036809816,
"acc_stderr": 0.033519538795212696,
"acc_norm": 0.2392638036809816,
"acc_norm_stderr": 0.033519538795212696
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.24382716049382716,
"acc_stderr": 0.023891879541959607,
"acc_norm": 0.24382716049382716,
"acc_norm_stderr": 0.023891879541959607
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036844,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036844
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.21243523316062177,
"acc_stderr": 0.02951928261681725,
"acc_norm": 0.21243523316062177,
"acc_norm_stderr": 0.02951928261681725
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2982456140350877,
"acc_stderr": 0.04303684033537317,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.04303684033537317
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.21100917431192662,
"acc_stderr": 0.017493922404112648,
"acc_norm": 0.21100917431192662,
"acc_norm_stderr": 0.017493922404112648
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.21428571428571427,
"acc_stderr": 0.03670066451047181,
"acc_norm": 0.21428571428571427,
"acc_norm_stderr": 0.03670066451047181
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.02392915551735129,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.02392915551735129
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.24793388429752067,
"acc_stderr": 0.03941897526516303,
"acc_norm": 0.24793388429752067,
"acc_norm_stderr": 0.03941897526516303
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.21052631578947367,
"acc_stderr": 0.03317672787533157,
"acc_norm": 0.21052631578947367,
"acc_norm_stderr": 0.03317672787533157
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.2581699346405229,
"acc_stderr": 0.017704531653250075,
"acc_norm": 0.2581699346405229,
"acc_norm_stderr": 0.017704531653250075
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.24468085106382978,
"acc_stderr": 0.02564555362226672,
"acc_norm": 0.24468085106382978,
"acc_norm_stderr": 0.02564555362226672
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.24107142857142858,
"acc_stderr": 0.04059867246952685,
"acc_norm": 0.24107142857142858,
"acc_norm_stderr": 0.04059867246952685
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.17592592592592593,
"acc_stderr": 0.02596742095825853,
"acc_norm": 0.17592592592592593,
"acc_norm_stderr": 0.02596742095825853
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.24916201117318434,
"acc_stderr": 0.014465893829859926,
"acc_norm": 0.24916201117318434,
"acc_norm_stderr": 0.014465893829859926
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.19852941176470587,
"acc_stderr": 0.024231013370541107,
"acc_norm": 0.19852941176470587,
"acc_norm_stderr": 0.024231013370541107
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.20408163265306123,
"acc_stderr": 0.025801283475090496,
"acc_norm": 0.20408163265306123,
"acc_norm_stderr": 0.025801283475090496
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.270042194092827,
"acc_stderr": 0.028900721906293426,
"acc_norm": 0.270042194092827,
"acc_norm_stderr": 0.028900721906293426
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.24902216427640156,
"acc_stderr": 0.01104489226404077,
"acc_norm": 0.24902216427640156,
"acc_norm_stderr": 0.01104489226404077
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.2696078431372549,
"acc_stderr": 0.031145570659486782,
"acc_norm": 0.2696078431372549,
"acc_norm_stderr": 0.031145570659486782
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.03453131801885415,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.03453131801885415
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2386780905752754,
"mc1_stderr": 0.014922629695456418,
"mc2": NaN,
"mc2_stderr": NaN
},
"harness|ko_commongen_v2|2": {
"acc": 0.09327036599763873,
"acc_stderr": 0.00999828619027673,
"acc_norm": 0.33293978748524206,
"acc_norm_stderr": 0.016202431208373776
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "mssma/ko-solar-10.7b-v0.2c",
"model_sha": "e7b3fb51a50b86976694b6cb3510a8a1d8966b62",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}