{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.35665529010238906,
            "acc_stderr": 0.013998056902620196,
            "acc_norm": 0.41467576791808874,
            "acc_norm_stderr": 0.014397070564409174
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.38329018123879705,
            "acc_stderr": 0.004851944170671259,
            "acc_norm": 0.4987054371639116,
            "acc_norm_stderr": 0.004989764686738831
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.52046783625731,
            "acc_stderr": 0.0383161053282193,
            "acc_norm": 0.52046783625731,
            "acc_norm_stderr": 0.0383161053282193
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5339805825242718,
            "acc_stderr": 0.04939291447273481,
            "acc_norm": 0.5339805825242718,
            "acc_norm_stderr": 0.04939291447273481
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5159642401021711,
            "acc_stderr": 0.017870847506081738,
            "acc_norm": 0.5159642401021711,
            "acc_norm_stderr": 0.017870847506081738
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3925925925925926,
            "acc_stderr": 0.04218506215368879,
            "acc_norm": 0.3925925925925926,
            "acc_norm_stderr": 0.04218506215368879
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.37872340425531914,
            "acc_stderr": 0.031709956060406545,
            "acc_norm": 0.37872340425531914,
            "acc_norm_stderr": 0.031709956060406545
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.39156626506024095,
            "acc_stderr": 0.03799857454479636,
            "acc_norm": 0.39156626506024095,
            "acc_norm_stderr": 0.03799857454479636
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4790996784565916,
            "acc_stderr": 0.028373270961069414,
            "acc_norm": 0.4790996784565916,
            "acc_norm_stderr": 0.028373270961069414
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.49327354260089684,
            "acc_stderr": 0.03355476596234353,
            "acc_norm": 0.49327354260089684,
            "acc_norm_stderr": 0.03355476596234353
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4732824427480916,
            "acc_stderr": 0.04379024936553894,
            "acc_norm": 0.4732824427480916,
            "acc_norm_stderr": 0.04379024936553894
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.035402943770953675,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.035402943770953675
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.46206896551724136,
            "acc_stderr": 0.041546596717075474,
            "acc_norm": 0.46206896551724136,
            "acc_norm_stderr": 0.041546596717075474
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.13725490196078433,
            "acc_stderr": 0.03424084669891524,
            "acc_norm": 0.13725490196078433,
            "acc_norm_stderr": 0.03424084669891524
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.41596638655462187,
            "acc_stderr": 0.03201650100739615,
            "acc_norm": 0.41596638655462187,
            "acc_norm_stderr": 0.03201650100739615
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4230769230769231,
            "acc_stderr": 0.025049197876042335,
            "acc_norm": 0.4230769230769231,
            "acc_norm_stderr": 0.025049197876042335
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.56,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5,
            "acc_stderr": 0.04833682445228318,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.04833682445228318
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.39408866995073893,
            "acc_stderr": 0.034381579670365446,
            "acc_norm": 0.39408866995073893,
            "acc_norm_stderr": 0.034381579670365446
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.432258064516129,
            "acc_stderr": 0.028181739720019413,
            "acc_norm": 0.432258064516129,
            "acc_norm_stderr": 0.028181739720019413
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7307692307692307,
            "acc_stderr": 0.029058588303748842,
            "acc_norm": 0.7307692307692307,
            "acc_norm_stderr": 0.029058588303748842
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.41509433962264153,
            "acc_stderr": 0.030325945789286105,
            "acc_norm": 0.41509433962264153,
            "acc_norm_stderr": 0.030325945789286105
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5181818181818182,
            "acc_stderr": 0.04785964010794915,
            "acc_norm": 0.5181818181818182,
            "acc_norm_stderr": 0.04785964010794915
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.34074074074074073,
            "acc_stderr": 0.02889774874113114,
            "acc_norm": 0.34074074074074073,
            "acc_norm_stderr": 0.02889774874113114
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.271523178807947,
            "acc_stderr": 0.03631329803969653,
            "acc_norm": 0.271523178807947,
            "acc_norm_stderr": 0.03631329803969653
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6019900497512438,
            "acc_stderr": 0.03461199429040014,
            "acc_norm": 0.6019900497512438,
            "acc_norm_stderr": 0.03461199429040014
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.35260115606936415,
            "acc_stderr": 0.036430371689585496,
            "acc_norm": 0.35260115606936415,
            "acc_norm_stderr": 0.036430371689585496
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.0248708152510571,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.0248708152510571
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.04076663253918567,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.04076663253918567
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.58,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5202312138728323,
            "acc_stderr": 0.026897049996382868,
            "acc_norm": 0.5202312138728323,
            "acc_norm_stderr": 0.026897049996382868
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.43558282208588955,
            "acc_stderr": 0.03895632464138937,
            "acc_norm": 0.43558282208588955,
            "acc_norm_stderr": 0.03895632464138937
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.44135802469135804,
            "acc_stderr": 0.027628737155668784,
            "acc_norm": 0.44135802469135804,
            "acc_norm_stderr": 0.027628737155668784
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5751295336787565,
            "acc_stderr": 0.035674713352125395,
            "acc_norm": 0.5751295336787565,
            "acc_norm_stderr": 0.035674713352125395
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.32456140350877194,
            "acc_stderr": 0.04404556157374768,
            "acc_norm": 0.32456140350877194,
            "acc_norm_stderr": 0.04404556157374768
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.46605504587155966,
            "acc_stderr": 0.021387863350354,
            "acc_norm": 0.46605504587155966,
            "acc_norm_stderr": 0.021387863350354
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30158730158730157,
            "acc_stderr": 0.04104947269903394,
            "acc_norm": 0.30158730158730157,
            "acc_norm_stderr": 0.04104947269903394
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4738562091503268,
            "acc_stderr": 0.028590752958852394,
            "acc_norm": 0.4738562091503268,
            "acc_norm_stderr": 0.028590752958852394
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5950413223140496,
            "acc_stderr": 0.04481137755942469,
            "acc_norm": 0.5950413223140496,
            "acc_norm_stderr": 0.04481137755942469
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.34868421052631576,
            "acc_stderr": 0.038781398887976104,
            "acc_norm": 0.34868421052631576,
            "acc_norm_stderr": 0.038781398887976104
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.40032679738562094,
            "acc_stderr": 0.01982184368827177,
            "acc_norm": 0.40032679738562094,
            "acc_norm_stderr": 0.01982184368827177
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.30141843971631205,
            "acc_stderr": 0.027374128882631157,
            "acc_norm": 0.30141843971631205,
            "acc_norm_stderr": 0.027374128882631157
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3125,
            "acc_stderr": 0.043994650575715215,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.043994650575715215
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.30092592592592593,
            "acc_stderr": 0.031280390843298825,
            "acc_norm": 0.30092592592592593,
            "acc_norm_stderr": 0.031280390843298825
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2435754189944134,
            "acc_stderr": 0.014355911964767864,
            "acc_norm": 0.2435754189944134,
            "acc_norm_stderr": 0.014355911964767864
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.58,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3272058823529412,
            "acc_stderr": 0.02850145286039656,
            "acc_norm": 0.3272058823529412,
            "acc_norm_stderr": 0.02850145286039656
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.44081632653061226,
            "acc_stderr": 0.03178419114175363,
            "acc_norm": 0.44081632653061226,
            "acc_norm_stderr": 0.03178419114175363
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5611814345991561,
            "acc_stderr": 0.032302649315470375,
            "acc_norm": 0.5611814345991561,
            "acc_norm_stderr": 0.032302649315470375
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3344198174706649,
            "acc_stderr": 0.012049668983214936,
            "acc_norm": 0.3344198174706649,
            "acc_norm_stderr": 0.012049668983214936
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.44607843137254904,
            "acc_stderr": 0.03488845451304974,
            "acc_norm": 0.44607843137254904,
            "acc_norm_stderr": 0.03488845451304974
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.47878787878787876,
            "acc_stderr": 0.03900828913737301,
            "acc_norm": 0.47878787878787876,
            "acc_norm_stderr": 0.03900828913737301
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2864137086903305,
            "mc1_stderr": 0.01582614243950235,
            "mc2": 0.453794908688158,
            "mc2_stderr": 0.015317536289389658
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3600944510035419,
            "acc_stderr": 0.016503686720440072,
            "acc_norm": 0.48760330578512395,
            "acc_norm_stderr": 0.017185069732676514
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "BM-K/mistral-7b-it-v1.0.1",
        "model_sha": "710fbce5dd54e5794f1bcdf4f53d3c0ceeafb405",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}