{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.439419795221843,
"acc_stderr": 0.014503747823580127,
"acc_norm": 0.4778156996587031,
"acc_norm_stderr": 0.014597001927076136
},
"harness|ko_hellaswag|10": {
"acc": 0.4076877116112328,
"acc_stderr": 0.00490400267618433,
"acc_norm": 0.5386377215694085,
"acc_norm_stderr": 0.0049748608784644325
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5789473684210527,
"acc_stderr": 0.03786720706234214,
"acc_norm": 0.5789473684210527,
"acc_norm_stderr": 0.03786720706234214
},
"harness|ko_mmlu_management|5": {
"acc": 0.6213592233009708,
"acc_stderr": 0.04802694698258975,
"acc_norm": 0.6213592233009708,
"acc_norm_stderr": 0.04802694698258975
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5721583652618135,
"acc_stderr": 0.017692787927803724,
"acc_norm": 0.5721583652618135,
"acc_norm_stderr": 0.017692787927803724
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4074074074074074,
"acc_stderr": 0.04244633238353229,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.04244633238353229
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.42127659574468085,
"acc_stderr": 0.03227834510146268,
"acc_norm": 0.42127659574468085,
"acc_norm_stderr": 0.03227834510146268
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4036144578313253,
"acc_stderr": 0.03819486140758397,
"acc_norm": 0.4036144578313253,
"acc_norm_stderr": 0.03819486140758397
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4983922829581994,
"acc_stderr": 0.02839794490780661,
"acc_norm": 0.4983922829581994,
"acc_norm_stderr": 0.02839794490780661
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.515695067264574,
"acc_stderr": 0.0335412657542081,
"acc_norm": 0.515695067264574,
"acc_norm_stderr": 0.0335412657542081
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.48854961832061067,
"acc_stderr": 0.043841400240780176,
"acc_norm": 0.48854961832061067,
"acc_norm_stderr": 0.043841400240780176
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5959595959595959,
"acc_stderr": 0.03496130972056127,
"acc_norm": 0.5959595959595959,
"acc_norm_stderr": 0.03496130972056127
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.43448275862068964,
"acc_stderr": 0.041307408795554966,
"acc_norm": 0.43448275862068964,
"acc_norm_stderr": 0.041307408795554966
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.042801058373643966,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.042801058373643966
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5042016806722689,
"acc_stderr": 0.03247734334448111,
"acc_norm": 0.5042016806722689,
"acc_norm_stderr": 0.03247734334448111
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.44358974358974357,
"acc_stderr": 0.025189149894764194,
"acc_norm": 0.44358974358974357,
"acc_norm_stderr": 0.025189149894764194
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001974
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.49074074074074076,
"acc_stderr": 0.04832853553437055,
"acc_norm": 0.49074074074074076,
"acc_norm_stderr": 0.04832853553437055
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3694581280788177,
"acc_stderr": 0.03395970381998574,
"acc_norm": 0.3694581280788177,
"acc_norm_stderr": 0.03395970381998574
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5,
"acc_stderr": 0.028444006199428714,
"acc_norm": 0.5,
"acc_norm_stderr": 0.028444006199428714
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7393162393162394,
"acc_stderr": 0.02876034895652341,
"acc_norm": 0.7393162393162394,
"acc_norm_stderr": 0.02876034895652341
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4716981132075472,
"acc_stderr": 0.030723535249006114,
"acc_norm": 0.4716981132075472,
"acc_norm_stderr": 0.030723535249006114
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5181818181818182,
"acc_stderr": 0.04785964010794915,
"acc_norm": 0.5181818181818182,
"acc_norm_stderr": 0.04785964010794915
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2851851851851852,
"acc_stderr": 0.027528599210340492,
"acc_norm": 0.2851851851851852,
"acc_norm_stderr": 0.027528599210340492
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31125827814569534,
"acc_stderr": 0.03780445850526732,
"acc_norm": 0.31125827814569534,
"acc_norm_stderr": 0.03780445850526732
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6218905472636815,
"acc_stderr": 0.03428867848778658,
"acc_norm": 0.6218905472636815,
"acc_norm_stderr": 0.03428867848778658
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.41040462427745666,
"acc_stderr": 0.03750757044895538,
"acc_norm": 0.41040462427745666,
"acc_norm_stderr": 0.03750757044895538
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.4126984126984127,
"acc_stderr": 0.02535574126305527,
"acc_norm": 0.4126984126984127,
"acc_norm_stderr": 0.02535574126305527
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.375,
"acc_stderr": 0.04048439222695598,
"acc_norm": 0.375,
"acc_norm_stderr": 0.04048439222695598
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.63,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.63,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5260115606936416,
"acc_stderr": 0.026882643434022895,
"acc_norm": 0.5260115606936416,
"acc_norm_stderr": 0.026882643434022895
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5214723926380368,
"acc_stderr": 0.03924746876751129,
"acc_norm": 0.5214723926380368,
"acc_norm_stderr": 0.03924746876751129
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.48148148148148145,
"acc_stderr": 0.02780165621232366,
"acc_norm": 0.48148148148148145,
"acc_norm_stderr": 0.02780165621232366
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252606,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252606
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5492227979274611,
"acc_stderr": 0.035909109522355244,
"acc_norm": 0.5492227979274611,
"acc_norm_stderr": 0.035909109522355244
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.30701754385964913,
"acc_stderr": 0.0433913832257986,
"acc_norm": 0.30701754385964913,
"acc_norm_stderr": 0.0433913832257986
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5339449541284403,
"acc_stderr": 0.021387863350353996,
"acc_norm": 0.5339449541284403,
"acc_norm_stderr": 0.021387863350353996
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3968253968253968,
"acc_stderr": 0.04375888492727062,
"acc_norm": 0.3968253968253968,
"acc_norm_stderr": 0.04375888492727062
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.477124183006536,
"acc_stderr": 0.028599936776089786,
"acc_norm": 0.477124183006536,
"acc_norm_stderr": 0.028599936776089786
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956912
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6859504132231405,
"acc_stderr": 0.04236964753041018,
"acc_norm": 0.6859504132231405,
"acc_norm_stderr": 0.04236964753041018
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.46710526315789475,
"acc_stderr": 0.040601270352363966,
"acc_norm": 0.46710526315789475,
"acc_norm_stderr": 0.040601270352363966
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.39869281045751637,
"acc_stderr": 0.019808281317449848,
"acc_norm": 0.39869281045751637,
"acc_norm_stderr": 0.019808281317449848
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.30851063829787234,
"acc_stderr": 0.027553366165101362,
"acc_norm": 0.30851063829787234,
"acc_norm_stderr": 0.027553366165101362
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.42857142857142855,
"acc_stderr": 0.04697113923010213,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.04697113923010213
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.37037037037037035,
"acc_stderr": 0.03293377139415191,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.03293377139415191
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2122905027932961,
"acc_stderr": 0.013676644685831725,
"acc_norm": 0.2122905027932961,
"acc_norm_stderr": 0.013676644685831725
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3492647058823529,
"acc_stderr": 0.028959755196824873,
"acc_norm": 0.3492647058823529,
"acc_norm_stderr": 0.028959755196824873
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5306122448979592,
"acc_stderr": 0.031949171367580624,
"acc_norm": 0.5306122448979592,
"acc_norm_stderr": 0.031949171367580624
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6708860759493671,
"acc_stderr": 0.030587326294702368,
"acc_norm": 0.6708860759493671,
"acc_norm_stderr": 0.030587326294702368
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3520208604954368,
"acc_stderr": 0.012198140605353592,
"acc_norm": 0.3520208604954368,
"acc_norm_stderr": 0.012198140605353592
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4803921568627451,
"acc_stderr": 0.03506612560524866,
"acc_norm": 0.4803921568627451,
"acc_norm_stderr": 0.03506612560524866
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5454545454545454,
"acc_stderr": 0.03888176921674101,
"acc_norm": 0.5454545454545454,
"acc_norm_stderr": 0.03888176921674101
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2998776009791922,
"mc1_stderr": 0.016040352966713606,
"mc2": 0.4699398119482503,
"mc2_stderr": 0.015489346893307833
},
"harness|ko_commongen_v2|2": {
"acc": 0.4793388429752066,
"acc_stderr": 0.01717567127983645,
"acc_norm": 0.5218417945690673,
"acc_norm_stderr": 0.017173944474294378
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "HanaGroup/Mini_Orca_16_32",
"model_sha": "1356bee33d15e26ae9738a179058f993134f6141",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}