results/Changgil/K2S3-v0.1/result_2024-04-29 01:06:23.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.45051194539249145,
"acc_stderr": 0.014539646098471627,
"acc_norm": 0.5008532423208191,
"acc_norm_stderr": 0.014611369529813269
},
"harness|ko_hellaswag|10": {
"acc": 0.4230233021310496,
"acc_stderr": 0.004930293787545619,
"acc_norm": 0.5676160127464649,
"acc_norm_stderr": 0.0049439450696114546
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6374269005847953,
"acc_stderr": 0.0368713061556206,
"acc_norm": 0.6374269005847953,
"acc_norm_stderr": 0.0368713061556206
},
"harness|ko_mmlu_management|5": {
"acc": 0.6019417475728155,
"acc_stderr": 0.048467482539772386,
"acc_norm": 0.6019417475728155,
"acc_norm_stderr": 0.048467482539772386
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.6168582375478927,
"acc_stderr": 0.01738477419488562,
"acc_norm": 0.6168582375478927,
"acc_norm_stderr": 0.01738477419488562
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.48148148148148145,
"acc_stderr": 0.04316378599511326,
"acc_norm": 0.48148148148148145,
"acc_norm_stderr": 0.04316378599511326
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4595744680851064,
"acc_stderr": 0.032579014820998356,
"acc_norm": 0.4595744680851064,
"acc_norm_stderr": 0.032579014820998356
},
"harness|ko_mmlu_virology|5": {
"acc": 0.46987951807228917,
"acc_stderr": 0.03885425420866766,
"acc_norm": 0.46987951807228917,
"acc_norm_stderr": 0.03885425420866766
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5594855305466238,
"acc_stderr": 0.028196400574197422,
"acc_norm": 0.5594855305466238,
"acc_norm_stderr": 0.028196400574197422
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5739910313901345,
"acc_stderr": 0.03318833286217281,
"acc_norm": 0.5739910313901345,
"acc_norm_stderr": 0.03318833286217281
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5725190839694656,
"acc_stderr": 0.04338920305792401,
"acc_norm": 0.5725190839694656,
"acc_norm_stderr": 0.04338920305792401
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.03358618145732524,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.03358618145732524
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.45517241379310347,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.45517241379310347,
"acc_norm_stderr": 0.04149886942192117
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.04389869956808778,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.04389869956808778
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5798319327731093,
"acc_stderr": 0.032061837832361516,
"acc_norm": 0.5798319327731093,
"acc_norm_stderr": 0.032061837832361516
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5333333333333333,
"acc_stderr": 0.025294608023986462,
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.025294608023986462
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.62,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.62,
"acc_norm_stderr": 0.048783173121456316
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6018518518518519,
"acc_stderr": 0.04732332615978814,
"acc_norm": 0.6018518518518519,
"acc_norm_stderr": 0.04732332615978814
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4482758620689655,
"acc_stderr": 0.034991131376767445,
"acc_norm": 0.4482758620689655,
"acc_norm_stderr": 0.034991131376767445
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.6,
"acc_stderr": 0.027869320571664632,
"acc_norm": 0.6,
"acc_norm_stderr": 0.027869320571664632
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7264957264957265,
"acc_stderr": 0.029202540153431187,
"acc_norm": 0.7264957264957265,
"acc_norm_stderr": 0.029202540153431187
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5471698113207547,
"acc_stderr": 0.03063562795796182,
"acc_norm": 0.5471698113207547,
"acc_norm_stderr": 0.03063562795796182
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5909090909090909,
"acc_stderr": 0.04709306978661895,
"acc_norm": 0.5909090909090909,
"acc_norm_stderr": 0.04709306978661895
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3296296296296296,
"acc_stderr": 0.028661201116524572,
"acc_norm": 0.3296296296296296,
"acc_norm_stderr": 0.028661201116524572
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.038020397601079024,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.038020397601079024
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6965174129353234,
"acc_stderr": 0.03251006816458618,
"acc_norm": 0.6965174129353234,
"acc_norm_stderr": 0.03251006816458618
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.5433526011560693,
"acc_stderr": 0.03798106566014498,
"acc_norm": 0.5433526011560693,
"acc_norm_stderr": 0.03798106566014498
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.37566137566137564,
"acc_stderr": 0.02494236893115979,
"acc_norm": 0.37566137566137564,
"acc_norm_stderr": 0.02494236893115979
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4583333333333333,
"acc_stderr": 0.04166666666666665,
"acc_norm": 0.4583333333333333,
"acc_norm_stderr": 0.04166666666666665
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.72,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5491329479768786,
"acc_stderr": 0.026788811931562757,
"acc_norm": 0.5491329479768786,
"acc_norm_stderr": 0.026788811931562757
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5214723926380368,
"acc_stderr": 0.03924746876751129,
"acc_norm": 0.5214723926380368,
"acc_norm_stderr": 0.03924746876751129
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5401234567901234,
"acc_stderr": 0.027731022753539274,
"acc_norm": 0.5401234567901234,
"acc_norm_stderr": 0.027731022753539274
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.689119170984456,
"acc_stderr": 0.03340361906276586,
"acc_norm": 0.689119170984456,
"acc_norm_stderr": 0.03340361906276586
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2894736842105263,
"acc_stderr": 0.04266339443159394,
"acc_norm": 0.2894736842105263,
"acc_norm_stderr": 0.04266339443159394
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6770642201834862,
"acc_stderr": 0.020048115923415342,
"acc_norm": 0.6770642201834862,
"acc_norm_stderr": 0.020048115923415342
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3253968253968254,
"acc_stderr": 0.04190596438871136,
"acc_norm": 0.3253968253968254,
"acc_norm_stderr": 0.04190596438871136
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5718954248366013,
"acc_stderr": 0.028332397483664274,
"acc_norm": 0.5718954248366013,
"acc_norm_stderr": 0.028332397483664274
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6776859504132231,
"acc_stderr": 0.042664163633521685,
"acc_norm": 0.6776859504132231,
"acc_norm_stderr": 0.042664163633521685
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5328947368421053,
"acc_stderr": 0.04060127035236395,
"acc_norm": 0.5328947368421053,
"acc_norm_stderr": 0.04060127035236395
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.49673202614379086,
"acc_stderr": 0.020227402794434867,
"acc_norm": 0.49673202614379086,
"acc_norm_stderr": 0.020227402794434867
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.35106382978723405,
"acc_stderr": 0.028473501272963775,
"acc_norm": 0.35106382978723405,
"acc_norm_stderr": 0.028473501272963775
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.38392857142857145,
"acc_stderr": 0.046161430750285455,
"acc_norm": 0.38392857142857145,
"acc_norm_stderr": 0.046161430750285455
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.44907407407407407,
"acc_stderr": 0.03392238405321617,
"acc_norm": 0.44907407407407407,
"acc_norm_stderr": 0.03392238405321617
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.30726256983240224,
"acc_stderr": 0.01543015884646961,
"acc_norm": 0.30726256983240224,
"acc_norm_stderr": 0.01543015884646961
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.62,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.62,
"acc_norm_stderr": 0.04878317312145632
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4485294117647059,
"acc_stderr": 0.0302114796091216,
"acc_norm": 0.4485294117647059,
"acc_norm_stderr": 0.0302114796091216
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.6489795918367347,
"acc_stderr": 0.030555316755573644,
"acc_norm": 0.6489795918367347,
"acc_norm_stderr": 0.030555316755573644
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6919831223628692,
"acc_stderr": 0.030052389335605695,
"acc_norm": 0.6919831223628692,
"acc_norm_stderr": 0.030052389335605695
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3559322033898305,
"acc_stderr": 0.01222864553727757,
"acc_norm": 0.3559322033898305,
"acc_norm_stderr": 0.01222864553727757
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6029411764705882,
"acc_stderr": 0.034341311647191286,
"acc_norm": 0.6029411764705882,
"acc_norm_stderr": 0.034341311647191286
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6242424242424243,
"acc_stderr": 0.03781887353205983,
"acc_norm": 0.6242424242424243,
"acc_norm_stderr": 0.03781887353205983
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2827417380660955,
"mc1_stderr": 0.01576477083677731,
"mc2": 0.43390504265082586,
"mc2_stderr": 0.015336718297088065
},
"harness|ko_commongen_v2|2": {
"acc": 0.5017709563164109,
"acc_stderr": 0.017190246276231853,
"acc_norm": 0.5584415584415584,
"acc_norm_stderr": 0.01707252587556311
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Changgil/K2S3-v0.1",
"model_sha": "d544e389f091983bb4f11314edb526d81753c919",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
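
A minimal sketch (not part of the record above) of how one might load this result file and compute an unweighted mean of the per-task acc_norm scores. The file name is assumed to match this record, and the unweighted average is only an illustration; the official leaderboard may aggregate or weight the benchmarks differently.

import json
from statistics import mean

# Hypothetical local path to the result file shown above.
with open("result_2024-04-29 01:06:23.json", encoding="utf-8") as f:
    data = json.load(f)

# Collect acc_norm from every task that reports it
# (ko_truthfulqa_mc reports mc1/mc2 instead and is skipped).
acc_norm_scores = [
    task["acc_norm"]
    for task in data["results"].values()
    if "acc_norm" in task
]

print(f"unweighted mean acc_norm over {len(acc_norm_scores)} tasks: "
      f"{mean(acc_norm_scores):.4f}")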