{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.44880546075085326,
            "acc_stderr": 0.014534599585097664,
            "acc_norm": 0.5170648464163823,
            "acc_norm_stderr": 0.01460287838853659
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4569806811392153,
            "acc_stderr": 0.004971278309204199,
            "acc_norm": 0.6095399322844055,
            "acc_norm_stderr": 0.004868564301540814
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5789473684210527,
            "acc_stderr": 0.03786720706234214,
            "acc_norm": 0.5789473684210527,
            "acc_norm_stderr": 0.03786720706234214
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6504854368932039,
            "acc_stderr": 0.047211885060971716,
            "acc_norm": 0.6504854368932039,
            "acc_norm_stderr": 0.047211885060971716
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6628352490421456,
            "acc_stderr": 0.016905207420803547,
            "acc_norm": 0.6628352490421456,
            "acc_norm_stderr": 0.016905207420803547
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45925925925925926,
            "acc_stderr": 0.04304979692464244,
            "acc_norm": 0.45925925925925926,
            "acc_norm_stderr": 0.04304979692464244
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768078,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768078
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.49361702127659574,
            "acc_stderr": 0.03268335899936338,
            "acc_norm": 0.49361702127659574,
            "acc_norm_stderr": 0.03268335899936338
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4457831325301205,
            "acc_stderr": 0.03869543323472101,
            "acc_norm": 0.4457831325301205,
            "acc_norm_stderr": 0.03869543323472101
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5819935691318328,
            "acc_stderr": 0.028013651891995076,
            "acc_norm": 0.5819935691318328,
            "acc_norm_stderr": 0.028013651891995076
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5919282511210763,
            "acc_stderr": 0.03298574607842822,
            "acc_norm": 0.5919282511210763,
            "acc_norm_stderr": 0.03298574607842822
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.6259541984732825,
            "acc_stderr": 0.04243869242230524,
            "acc_norm": 0.6259541984732825,
            "acc_norm_stderr": 0.04243869242230524
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.42,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.7272727272727273,
            "acc_stderr": 0.03173071239071724,
            "acc_norm": 0.7272727272727273,
            "acc_norm_stderr": 0.03173071239071724
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.45517241379310347,
            "acc_stderr": 0.04149886942192117,
            "acc_norm": 0.45517241379310347,
            "acc_norm_stderr": 0.04149886942192117
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.27450980392156865,
            "acc_stderr": 0.04440521906179328,
            "acc_norm": 0.27450980392156865,
            "acc_norm_stderr": 0.04440521906179328
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5714285714285714,
            "acc_stderr": 0.03214536859788639,
            "acc_norm": 0.5714285714285714,
            "acc_norm_stderr": 0.03214536859788639
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5128205128205128,
            "acc_stderr": 0.02534267129380724,
            "acc_norm": 0.5128205128205128,
            "acc_norm_stderr": 0.02534267129380724
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.56,
            "acc_stderr": 0.0498887651569859,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.0498887651569859
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6111111111111112,
            "acc_stderr": 0.04712821257426769,
            "acc_norm": 0.6111111111111112,
            "acc_norm_stderr": 0.04712821257426769
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.43349753694581283,
            "acc_stderr": 0.03486731727419872,
            "acc_norm": 0.43349753694581283,
            "acc_norm_stderr": 0.03486731727419872
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5741935483870968,
            "acc_stderr": 0.028129112709165904,
            "acc_norm": 0.5741935483870968,
            "acc_norm_stderr": 0.028129112709165904
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7863247863247863,
            "acc_stderr": 0.02685345037700915,
            "acc_norm": 0.7863247863247863,
            "acc_norm_stderr": 0.02685345037700915
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5358490566037736,
            "acc_stderr": 0.030693675018458003,
            "acc_norm": 0.5358490566037736,
            "acc_norm_stderr": 0.030693675018458003
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5181818181818182,
            "acc_stderr": 0.04785964010794915,
            "acc_norm": 0.5181818181818182,
            "acc_norm_stderr": 0.04785964010794915
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3296296296296296,
            "acc_stderr": 0.028661201116524586,
            "acc_norm": 0.3296296296296296,
            "acc_norm_stderr": 0.028661201116524586
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33112582781456956,
            "acc_stderr": 0.038425817186598696,
            "acc_norm": 0.33112582781456956,
            "acc_norm_stderr": 0.038425817186598696
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6865671641791045,
            "acc_stderr": 0.03280188205348642,
            "acc_norm": 0.6865671641791045,
            "acc_norm_stderr": 0.03280188205348642
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4508670520231214,
            "acc_stderr": 0.037940126746970296,
            "acc_norm": 0.4508670520231214,
            "acc_norm_stderr": 0.037940126746970296
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.4074074074074074,
            "acc_stderr": 0.025305906241590632,
            "acc_norm": 0.4074074074074074,
            "acc_norm_stderr": 0.025305906241590632
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5069444444444444,
            "acc_stderr": 0.04180806750294938,
            "acc_norm": 0.5069444444444444,
            "acc_norm_stderr": 0.04180806750294938
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.69,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.69,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5606936416184971,
            "acc_stderr": 0.026720034380514998,
            "acc_norm": 0.5606936416184971,
            "acc_norm_stderr": 0.026720034380514998
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5030674846625767,
            "acc_stderr": 0.03928297078179663,
            "acc_norm": 0.5030674846625767,
            "acc_norm_stderr": 0.03928297078179663
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5987654320987654,
            "acc_stderr": 0.027272582849839796,
            "acc_norm": 0.5987654320987654,
            "acc_norm_stderr": 0.027272582849839796
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.7150259067357513,
            "acc_stderr": 0.032577140777096614,
            "acc_norm": 0.7150259067357513,
            "acc_norm_stderr": 0.032577140777096614
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.4298245614035088,
            "acc_stderr": 0.046570472605949625,
            "acc_norm": 0.4298245614035088,
            "acc_norm_stderr": 0.046570472605949625
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6752293577981652,
            "acc_stderr": 0.020077729109310327,
            "acc_norm": 0.6752293577981652,
            "acc_norm_stderr": 0.020077729109310327
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04216370213557835,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04216370213557835
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5588235294117647,
            "acc_stderr": 0.02843109544417664,
            "acc_norm": 0.5588235294117647,
            "acc_norm_stderr": 0.02843109544417664
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.59,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7024793388429752,
            "acc_stderr": 0.04173349148083499,
            "acc_norm": 0.7024793388429752,
            "acc_norm_stderr": 0.04173349148083499
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5460526315789473,
            "acc_stderr": 0.040516463428741434,
            "acc_norm": 0.5460526315789473,
            "acc_norm_stderr": 0.040516463428741434
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.5016339869281046,
            "acc_stderr": 0.020227726838150124,
            "acc_norm": 0.5016339869281046,
            "acc_norm_stderr": 0.020227726838150124
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.41843971631205673,
            "acc_stderr": 0.029427994039419994,
            "acc_norm": 0.41843971631205673,
            "acc_norm_stderr": 0.029427994039419994
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.39285714285714285,
            "acc_stderr": 0.04635550135609976,
            "acc_norm": 0.39285714285714285,
            "acc_norm_stderr": 0.04635550135609976
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4305555555555556,
            "acc_stderr": 0.033769221512523345,
            "acc_norm": 0.4305555555555556,
            "acc_norm_stderr": 0.033769221512523345
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.34301675977653634,
            "acc_stderr": 0.015876912673057745,
            "acc_norm": 0.34301675977653634,
            "acc_norm_stderr": 0.015876912673057745
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.69,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.69,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.45588235294117646,
            "acc_stderr": 0.030254372573976694,
            "acc_norm": 0.45588235294117646,
            "acc_norm_stderr": 0.030254372573976694
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5183673469387755,
            "acc_stderr": 0.03198761546763127,
            "acc_norm": 0.5183673469387755,
            "acc_norm_stderr": 0.03198761546763127
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.759493670886076,
            "acc_stderr": 0.027820781981149678,
            "acc_norm": 0.759493670886076,
            "acc_norm_stderr": 0.027820781981149678
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.38396349413298564,
            "acc_stderr": 0.01242158783313423,
            "acc_norm": 0.38396349413298564,
            "acc_norm_stderr": 0.01242158783313423
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6274509803921569,
            "acc_stderr": 0.03393388584958404,
            "acc_norm": 0.6274509803921569,
            "acc_norm_stderr": 0.03393388584958404
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6363636363636364,
            "acc_stderr": 0.03756335775187896,
            "acc_norm": 0.6363636363636364,
            "acc_norm_stderr": 0.03756335775187896
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2962056303549572,
            "mc1_stderr": 0.0159835951018114,
            "mc2": 0.47237043227217157,
            "mc2_stderr": 0.01595447958123581
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5277449822904369,
            "acc_stderr": 0.017163867979456012,
            "acc_norm": 0.5796930342384888,
            "acc_norm_stderr": 0.016970598281177706
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Minirecord/solar_informal_10.7b",
        "model_sha": "40e56be12a5cb6a4de493e31c5397d36fa286497",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}