{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3822525597269625,
            "acc_stderr": 0.014200454049979279,
            "acc_norm": 0.4402730375426621,
            "acc_norm_stderr": 0.014506769524804243
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.41894045010953995,
            "acc_stderr": 0.004923772581848503,
            "acc_norm": 0.5714997012547302,
            "acc_norm_stderr": 0.004938500303990289
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4678362573099415,
            "acc_stderr": 0.038268824176603704,
            "acc_norm": 0.4678362573099415,
            "acc_norm_stderr": 0.038268824176603704
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5242718446601942,
            "acc_stderr": 0.049449010929737795,
            "acc_norm": 0.5242718446601942,
            "acc_norm_stderr": 0.049449010929737795
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.01776925058353325,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.01776925058353325
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4740740740740741,
            "acc_stderr": 0.04313531696750574,
            "acc_norm": 0.4740740740740741,
            "acc_norm_stderr": 0.04313531696750574
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4127659574468085,
            "acc_stderr": 0.03218471141400351,
            "acc_norm": 0.4127659574468085,
            "acc_norm_stderr": 0.03218471141400351
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.41566265060240964,
            "acc_stderr": 0.03836722176598052,
            "acc_norm": 0.41566265060240964,
            "acc_norm_stderr": 0.03836722176598052
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5241157556270096,
            "acc_stderr": 0.028365041542564584,
            "acc_norm": 0.5241157556270096,
            "acc_norm_stderr": 0.028365041542564584
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5246636771300448,
            "acc_stderr": 0.03351695167652628,
            "acc_norm": 0.5246636771300448,
            "acc_norm_stderr": 0.03351695167652628
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5190839694656488,
            "acc_stderr": 0.043820947055509867,
            "acc_norm": 0.5190839694656488,
            "acc_norm_stderr": 0.043820947055509867
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5707070707070707,
            "acc_stderr": 0.03526552724601198,
            "acc_norm": 0.5707070707070707,
            "acc_norm_stderr": 0.03526552724601198
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.43448275862068964,
            "acc_stderr": 0.041307408795554966,
            "acc_norm": 0.43448275862068964,
            "acc_norm_stderr": 0.041307408795554966
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.040233822736177476,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.040233822736177476
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.47058823529411764,
            "acc_stderr": 0.032422250271150074,
            "acc_norm": 0.47058823529411764,
            "acc_norm_stderr": 0.032422250271150074
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4025641025641026,
            "acc_stderr": 0.024864995159767762,
            "acc_norm": 0.4025641025641026,
            "acc_norm_stderr": 0.024864995159767762
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.66,
            "acc_stderr": 0.04760952285695237,
            "acc_norm": 0.66,
            "acc_norm_stderr": 0.04760952285695237
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.46296296296296297,
            "acc_stderr": 0.04820403072760627,
            "acc_norm": 0.46296296296296297,
            "acc_norm_stderr": 0.04820403072760627
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3694581280788177,
            "acc_stderr": 0.03395970381998574,
            "acc_norm": 0.3694581280788177,
            "acc_norm_stderr": 0.03395970381998574
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5129032258064516,
            "acc_stderr": 0.02843453315268187,
            "acc_norm": 0.5129032258064516,
            "acc_norm_stderr": 0.02843453315268187
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6709401709401709,
            "acc_stderr": 0.03078232157768817,
            "acc_norm": 0.6709401709401709,
            "acc_norm_stderr": 0.03078232157768817
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4377358490566038,
            "acc_stderr": 0.030533338430467516,
            "acc_norm": 0.4377358490566038,
            "acc_norm_stderr": 0.030533338430467516
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5181818181818182,
            "acc_stderr": 0.04785964010794916,
            "acc_norm": 0.5181818181818182,
            "acc_norm_stderr": 0.04785964010794916
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24074074074074073,
            "acc_stderr": 0.026067159222275798,
            "acc_norm": 0.24074074074074073,
            "acc_norm_stderr": 0.026067159222275798
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33112582781456956,
            "acc_stderr": 0.038425817186598696,
            "acc_norm": 0.33112582781456956,
            "acc_norm_stderr": 0.038425817186598696
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5223880597014925,
            "acc_stderr": 0.03531987930208731,
            "acc_norm": 0.5223880597014925,
            "acc_norm_stderr": 0.03531987930208731
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3930635838150289,
            "acc_stderr": 0.037242495958177295,
            "acc_norm": 0.3930635838150289,
            "acc_norm_stderr": 0.037242495958177295
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2962962962962963,
            "acc_stderr": 0.023517294335963286,
            "acc_norm": 0.2962962962962963,
            "acc_norm_stderr": 0.023517294335963286
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4236111111111111,
            "acc_stderr": 0.041321250197233685,
            "acc_norm": 0.4236111111111111,
            "acc_norm_stderr": 0.041321250197233685
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.63,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.48554913294797686,
            "acc_stderr": 0.02690784985628254,
            "acc_norm": 0.48554913294797686,
            "acc_norm_stderr": 0.02690784985628254
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4539877300613497,
            "acc_stderr": 0.0391170190467718,
            "acc_norm": 0.4539877300613497,
            "acc_norm_stderr": 0.0391170190467718
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4845679012345679,
            "acc_stderr": 0.0278074900442762,
            "acc_norm": 0.4845679012345679,
            "acc_norm_stderr": 0.0278074900442762
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5181347150259067,
            "acc_stderr": 0.036060650018329185,
            "acc_norm": 0.5181347150259067,
            "acc_norm_stderr": 0.036060650018329185
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2719298245614035,
            "acc_stderr": 0.041857744240220575,
            "acc_norm": 0.2719298245614035,
            "acc_norm_stderr": 0.041857744240220575
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5614678899082569,
            "acc_stderr": 0.021274713073954562,
            "acc_norm": 0.5614678899082569,
            "acc_norm_stderr": 0.021274713073954562
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.0393253768039287,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.0393253768039287
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4215686274509804,
            "acc_stderr": 0.02827549015679143,
            "acc_norm": 0.4215686274509804,
            "acc_norm_stderr": 0.02827549015679143
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6528925619834711,
            "acc_stderr": 0.04345724570292535,
            "acc_norm": 0.6528925619834711,
            "acc_norm_stderr": 0.04345724570292535
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.40131578947368424,
            "acc_stderr": 0.03988903703336284,
            "acc_norm": 0.40131578947368424,
            "acc_norm_stderr": 0.03988903703336284
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4035947712418301,
            "acc_stderr": 0.01984828016840115,
            "acc_norm": 0.4035947712418301,
            "acc_norm_stderr": 0.01984828016840115
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.32269503546099293,
            "acc_stderr": 0.027889139300534778,
            "acc_norm": 0.32269503546099293,
            "acc_norm_stderr": 0.027889139300534778
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25,
            "acc_stderr": 0.04109974682633932,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04109974682633932
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.03214952147802749,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.03214952147802749
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.25921787709497207,
            "acc_stderr": 0.01465578083749773,
            "acc_norm": 0.25921787709497207,
            "acc_norm_stderr": 0.01465578083749773
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.33455882352941174,
            "acc_stderr": 0.028661996202335314,
            "acc_norm": 0.33455882352941174,
            "acc_norm_stderr": 0.028661996202335314
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.43673469387755104,
            "acc_stderr": 0.03175195237583322,
            "acc_norm": 0.43673469387755104,
            "acc_norm_stderr": 0.03175195237583322
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6286919831223629,
            "acc_stderr": 0.03145068600744858,
            "acc_norm": 0.6286919831223629,
            "acc_norm_stderr": 0.03145068600744858
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.34810951760104303,
            "acc_stderr": 0.0121667389936982,
            "acc_norm": 0.34810951760104303,
            "acc_norm_stderr": 0.0121667389936982
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5294117647058824,
            "acc_stderr": 0.03503235296367994,
            "acc_norm": 0.5294117647058824,
            "acc_norm_stderr": 0.03503235296367994
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5696969696969697,
            "acc_stderr": 0.038662259628790774,
            "acc_norm": 0.5696969696969697,
            "acc_norm_stderr": 0.038662259628790774
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.24724602203182375,
            "mc1_stderr": 0.015102404797359649,
            "mc2": 0.4028203603366851,
            "mc2_stderr": 0.01477432836961688
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.38961038961038963,
            "acc_stderr": 0.0167661616718935,
            "acc_norm": 0.45218417945690675,
            "acc_norm_stderr": 0.017111567130916782
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "AIFT/aift-llama2-koen-instruct-v1.1",
        "model_sha": "202c3e3df0c4a321503df8d4c78da213f1ae5475",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}