{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3643344709897611,
            "acc_stderr": 0.014063260279882417,
            "acc_norm": 0.4377133105802048,
            "acc_norm_stderr": 0.014497573881108285
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.41416052579167495,
            "acc_stderr": 0.004915697886906119,
            "acc_norm": 0.5499900418243377,
            "acc_norm_stderr": 0.004964779805180654
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5497076023391813,
            "acc_stderr": 0.038158273659132366,
            "acc_norm": 0.5497076023391813,
            "acc_norm_stderr": 0.038158273659132366
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5922330097087378,
            "acc_stderr": 0.04865777570410769,
            "acc_norm": 0.5922330097087378,
            "acc_norm_stderr": 0.04865777570410769
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5734355044699873,
            "acc_stderr": 0.017686066975675648,
            "acc_norm": 0.5734355044699873,
            "acc_norm_stderr": 0.017686066975675648
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4222222222222222,
            "acc_stderr": 0.04266763404099582,
            "acc_norm": 0.4222222222222222,
            "acc_norm_stderr": 0.04266763404099582
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.42127659574468085,
            "acc_stderr": 0.03227834510146267,
            "acc_norm": 0.42127659574468085,
            "acc_norm_stderr": 0.03227834510146267
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3614457831325301,
            "acc_stderr": 0.03740059382029319,
            "acc_norm": 0.3614457831325301,
            "acc_norm_stderr": 0.03740059382029319
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5209003215434084,
            "acc_stderr": 0.028373270961069414,
            "acc_norm": 0.5209003215434084,
            "acc_norm_stderr": 0.028373270961069414
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4439461883408072,
            "acc_stderr": 0.03334625674242728,
            "acc_norm": 0.4439461883408072,
            "acc_norm_stderr": 0.03334625674242728
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5725190839694656,
            "acc_stderr": 0.043389203057924014,
            "acc_norm": 0.5725190839694656,
            "acc_norm_stderr": 0.043389203057924014
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.45,
            "acc_stderr": 0.04999999999999999,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.04999999999999999
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6414141414141414,
            "acc_stderr": 0.03416903640391521,
            "acc_norm": 0.6414141414141414,
            "acc_norm_stderr": 0.03416903640391521
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5586206896551724,
            "acc_stderr": 0.04137931034482757,
            "acc_norm": 0.5586206896551724,
            "acc_norm_stderr": 0.04137931034482757
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.042801058373643945,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.042801058373643945
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5084033613445378,
            "acc_stderr": 0.03247390276569669,
            "acc_norm": 0.5084033613445378,
            "acc_norm_stderr": 0.03247390276569669
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4717948717948718,
            "acc_stderr": 0.025310639254933903,
            "acc_norm": 0.4717948717948718,
            "acc_norm_stderr": 0.025310639254933903
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.65,
            "acc_stderr": 0.04793724854411021,
            "acc_norm": 0.65,
            "acc_norm_stderr": 0.04793724854411021
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5185185185185185,
            "acc_stderr": 0.04830366024635331,
            "acc_norm": 0.5185185185185185,
            "acc_norm_stderr": 0.04830366024635331
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.39408866995073893,
            "acc_stderr": 0.034381579670365446,
            "acc_norm": 0.39408866995073893,
            "acc_norm_stderr": 0.034381579670365446
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.49032258064516127,
            "acc_stderr": 0.028438677998909558,
            "acc_norm": 0.49032258064516127,
            "acc_norm_stderr": 0.028438677998909558
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7008547008547008,
            "acc_stderr": 0.029996951858349483,
            "acc_norm": 0.7008547008547008,
            "acc_norm_stderr": 0.029996951858349483
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4716981132075472,
            "acc_stderr": 0.030723535249006107,
            "acc_norm": 0.4716981132075472,
            "acc_norm_stderr": 0.030723535249006107
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5181818181818182,
            "acc_stderr": 0.04785964010794916,
            "acc_norm": 0.5181818181818182,
            "acc_norm_stderr": 0.04785964010794916
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.02831753349606648,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.02831753349606648
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3576158940397351,
            "acc_stderr": 0.03913453431177258,
            "acc_norm": 0.3576158940397351,
            "acc_norm_stderr": 0.03913453431177258
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.572139303482587,
            "acc_stderr": 0.03498541988407795,
            "acc_norm": 0.572139303482587,
            "acc_norm_stderr": 0.03498541988407795
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4161849710982659,
            "acc_stderr": 0.037585177754049466,
            "acc_norm": 0.4161849710982659,
            "acc_norm_stderr": 0.037585177754049466
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.335978835978836,
            "acc_stderr": 0.024326310529149152,
            "acc_norm": 0.335978835978836,
            "acc_norm_stderr": 0.024326310529149152
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4861111111111111,
            "acc_stderr": 0.04179596617581,
            "acc_norm": 0.4861111111111111,
            "acc_norm_stderr": 0.04179596617581
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.56,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.476878612716763,
            "acc_stderr": 0.026890297881303125,
            "acc_norm": 0.476878612716763,
            "acc_norm_stderr": 0.026890297881303125
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5030674846625767,
            "acc_stderr": 0.03928297078179663,
            "acc_norm": 0.5030674846625767,
            "acc_norm_stderr": 0.03928297078179663
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.027777777777777797,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.027777777777777797
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5854922279792746,
            "acc_stderr": 0.03555300319557669,
            "acc_norm": 0.5854922279792746,
            "acc_norm_stderr": 0.03555300319557669
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.35964912280701755,
            "acc_stderr": 0.04514496132873632,
            "acc_norm": 0.35964912280701755,
            "acc_norm_stderr": 0.04514496132873632
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6165137614678899,
            "acc_stderr": 0.020847156641915984,
            "acc_norm": 0.6165137614678899,
            "acc_norm_stderr": 0.020847156641915984
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.31746031746031744,
            "acc_stderr": 0.0416345303130286,
            "acc_norm": 0.31746031746031744,
            "acc_norm_stderr": 0.0416345303130286
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.46405228758169936,
            "acc_stderr": 0.028555827516528787,
            "acc_norm": 0.46405228758169936,
            "acc_norm_stderr": 0.028555827516528787
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.53,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6446280991735537,
            "acc_stderr": 0.0436923632657398,
            "acc_norm": 0.6446280991735537,
            "acc_norm_stderr": 0.0436923632657398
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4473684210526316,
            "acc_stderr": 0.040463368839782514,
            "acc_norm": 0.4473684210526316,
            "acc_norm_stderr": 0.040463368839782514
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4199346405228758,
            "acc_stderr": 0.019966811178256477,
            "acc_norm": 0.4199346405228758,
            "acc_norm_stderr": 0.019966811178256477
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3404255319148936,
            "acc_stderr": 0.02826765748265014,
            "acc_norm": 0.3404255319148936,
            "acc_norm_stderr": 0.02826765748265014
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.38392857142857145,
            "acc_stderr": 0.04616143075028545,
            "acc_norm": 0.38392857142857145,
            "acc_norm_stderr": 0.04616143075028545
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.375,
            "acc_stderr": 0.033016908987210894,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.033016908987210894
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2636871508379888,
            "acc_stderr": 0.014736926383761994,
            "acc_norm": 0.2636871508379888,
            "acc_norm_stderr": 0.014736926383761994
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956911,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956911
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.41544117647058826,
            "acc_stderr": 0.029935342707877746,
            "acc_norm": 0.41544117647058826,
            "acc_norm_stderr": 0.029935342707877746
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4448979591836735,
            "acc_stderr": 0.031814251181977865,
            "acc_norm": 0.4448979591836735,
            "acc_norm_stderr": 0.031814251181977865
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5527426160337553,
            "acc_stderr": 0.03236564251614192,
            "acc_norm": 0.5527426160337553,
            "acc_norm_stderr": 0.03236564251614192
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.32985658409387225,
            "acc_stderr": 0.012008129938540486,
            "acc_norm": 0.32985658409387225,
            "acc_norm_stderr": 0.012008129938540486
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5,
            "acc_stderr": 0.03509312031717982,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.03509312031717982
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5636363636363636,
            "acc_stderr": 0.03872592983524754,
            "acc_norm": 0.5636363636363636,
            "acc_norm_stderr": 0.03872592983524754
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2876376988984088,
            "mc1_stderr": 0.015846315101394812,
            "mc2": 0.441552259633933,
            "mc2_stderr": 0.015274593381980957
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5525383707201889,
            "acc_stderr": 0.01709519030150058,
            "acc_norm": 0.5608028335301063,
            "acc_norm_stderr": 0.017062775744780705
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "ITT-AF/ITT-Yi-Ko-6B-v4.0",
        "model_sha": "55bafef5b47043503949a3b663903c58689d444f",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}