{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.27303754266211605,
            "acc_stderr": 0.013019332762635734,
            "acc_norm": 0.32081911262798635,
            "acc_norm_stderr": 0.013640943091946522
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.35461063533160725,
            "acc_stderr": 0.004774174590205146,
            "acc_norm": 0.4547898824935272,
            "acc_norm_stderr": 0.004969341773423515
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.30409356725146197,
            "acc_stderr": 0.03528211258245232,
            "acc_norm": 0.30409356725146197,
            "acc_norm_stderr": 0.03528211258245232
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.17475728155339806,
            "acc_stderr": 0.03760178006026621,
            "acc_norm": 0.17475728155339806,
            "acc_norm_stderr": 0.03760178006026621
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2541507024265645,
            "acc_stderr": 0.015569254692045785,
            "acc_norm": 0.2541507024265645,
            "acc_norm_stderr": 0.015569254692045785
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.03820169914517905,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.03820169914517905
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3148936170212766,
            "acc_stderr": 0.03036358219723817,
            "acc_norm": 0.3148936170212766,
            "acc_norm_stderr": 0.03036358219723817
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3132530120481928,
            "acc_stderr": 0.03610805018031023,
            "acc_norm": 0.3132530120481928,
            "acc_norm_stderr": 0.03610805018031023
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2733118971061093,
            "acc_stderr": 0.02531176597542612,
            "acc_norm": 0.2733118971061093,
            "acc_norm_stderr": 0.02531176597542612
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3004484304932735,
            "acc_stderr": 0.030769352008229136,
            "acc_norm": 0.3004484304932735,
            "acc_norm_stderr": 0.030769352008229136
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.26717557251908397,
            "acc_stderr": 0.038808483010823965,
            "acc_norm": 0.26717557251908397,
            "acc_norm_stderr": 0.038808483010823965
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.22727272727272727,
            "acc_stderr": 0.029857515673386414,
            "acc_norm": 0.22727272727272727,
            "acc_norm_stderr": 0.029857515673386414
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2413793103448276,
            "acc_stderr": 0.03565998174135302,
            "acc_norm": 0.2413793103448276,
            "acc_norm_stderr": 0.03565998174135302
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.040925639582376536,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.040925639582376536
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.026653531596715494,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.026653531596715494
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.23076923076923078,
            "acc_stderr": 0.02136202772522271,
            "acc_norm": 0.23076923076923078,
            "acc_norm_stderr": 0.02136202772522271
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.22,
            "acc_stderr": 0.04163331998932269,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.04163331998932269
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.042365112580946315,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.042365112580946315
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.20689655172413793,
            "acc_stderr": 0.02850137816789395,
            "acc_norm": 0.20689655172413793,
            "acc_norm_stderr": 0.02850137816789395
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.24516129032258063,
            "acc_stderr": 0.024472243840895514,
            "acc_norm": 0.24516129032258063,
            "acc_norm_stderr": 0.024472243840895514
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.3076923076923077,
            "acc_stderr": 0.030236389942173106,
            "acc_norm": 0.3076923076923077,
            "acc_norm_stderr": 0.030236389942173106
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.20754716981132076,
            "acc_stderr": 0.024959918028911274,
            "acc_norm": 0.20754716981132076,
            "acc_norm_stderr": 0.024959918028911274
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.22727272727272727,
            "acc_stderr": 0.04013964554072775,
            "acc_norm": 0.22727272727272727,
            "acc_norm_stderr": 0.04013964554072775
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.027309140588230175,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.027309140588230175
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.32450331125827814,
            "acc_stderr": 0.03822746937658753,
            "acc_norm": 0.32450331125827814,
            "acc_norm_stderr": 0.03822746937658753
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.24378109452736318,
            "acc_stderr": 0.030360490154014624,
            "acc_norm": 0.24378109452736318,
            "acc_norm_stderr": 0.030360490154014624
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.1907514450867052,
            "acc_stderr": 0.029957851329869337,
            "acc_norm": 0.1907514450867052,
            "acc_norm_stderr": 0.029957851329869337
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.22486772486772486,
            "acc_stderr": 0.02150209607822914,
            "acc_norm": 0.22486772486772486,
            "acc_norm_stderr": 0.02150209607822914
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.20833333333333334,
            "acc_stderr": 0.03396116205845333,
            "acc_norm": 0.20833333333333334,
            "acc_norm_stderr": 0.03396116205845333
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.24566473988439305,
            "acc_stderr": 0.02317629820399201,
            "acc_norm": 0.24566473988439305,
            "acc_norm_stderr": 0.02317629820399201
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.2883435582822086,
            "acc_stderr": 0.035590395316173425,
            "acc_norm": 0.2883435582822086,
            "acc_norm_stderr": 0.035590395316173425
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2839506172839506,
            "acc_stderr": 0.025089478523765127,
            "acc_norm": 0.2839506172839506,
            "acc_norm_stderr": 0.025089478523765127
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.24870466321243523,
            "acc_stderr": 0.031195840877700307,
            "acc_norm": 0.24870466321243523,
            "acc_norm_stderr": 0.031195840877700307
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03947152782669415,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03947152782669415
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.25504587155963304,
            "acc_stderr": 0.018688500856535856,
            "acc_norm": 0.25504587155963304,
            "acc_norm_stderr": 0.018688500856535856
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.1984126984126984,
            "acc_stderr": 0.03567016675276863,
            "acc_norm": 0.1984126984126984,
            "acc_norm_stderr": 0.03567016675276863
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.0252616912197295,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.0252616912197295
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.35537190082644626,
            "acc_stderr": 0.04369236326573981,
            "acc_norm": 0.35537190082644626,
            "acc_norm_stderr": 0.04369236326573981
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.18421052631578946,
            "acc_stderr": 0.0315469804508223,
            "acc_norm": 0.18421052631578946,
            "acc_norm_stderr": 0.0315469804508223
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.27450980392156865,
            "acc_stderr": 0.018054027458815198,
            "acc_norm": 0.27450980392156865,
            "acc_norm_stderr": 0.018054027458815198
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2730496453900709,
            "acc_stderr": 0.026577860943307857,
            "acc_norm": 0.2730496453900709,
            "acc_norm_stderr": 0.026577860943307857
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.20535714285714285,
            "acc_stderr": 0.03834241021419074,
            "acc_norm": 0.20535714285714285,
            "acc_norm_stderr": 0.03834241021419074
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2916666666666667,
            "acc_stderr": 0.030998666304560534,
            "acc_norm": 0.2916666666666667,
            "acc_norm_stderr": 0.030998666304560534
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27262569832402234,
            "acc_stderr": 0.014893391735249608,
            "acc_norm": 0.27262569832402234,
            "acc_norm_stderr": 0.014893391735249608
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.40441176470588236,
            "acc_stderr": 0.029812630701569743,
            "acc_norm": 0.40441176470588236,
            "acc_norm_stderr": 0.029812630701569743
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.23673469387755103,
            "acc_stderr": 0.02721283588407315,
            "acc_norm": 0.23673469387755103,
            "acc_norm_stderr": 0.02721283588407315
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.27848101265822783,
            "acc_stderr": 0.02917868230484256,
            "acc_norm": 0.27848101265822783,
            "acc_norm_stderr": 0.02917868230484256
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2646675358539765,
            "acc_stderr": 0.011267332992845516,
            "acc_norm": 0.2646675358539765,
            "acc_norm_stderr": 0.011267332992845516
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3088235294117647,
            "acc_stderr": 0.03242661719827218,
            "acc_norm": 0.3088235294117647,
            "acc_norm_stderr": 0.03242661719827218
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2909090909090909,
            "acc_stderr": 0.03546563019624336,
            "acc_norm": 0.2909090909090909,
            "acc_norm_stderr": 0.03546563019624336
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26560587515299877,
            "mc1_stderr": 0.015461027627253595,
            "mc2": 0.41796426846893153,
            "mc2_stderr": 0.014850155003426721
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.24557260920897284,
            "acc_stderr": 0.014798357154972804,
            "acc_norm": 0.33293978748524206,
            "acc_norm_stderr": 0.01620243120837379
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "ITT-AF/ITT-42dot_LLM-PLM-1.3B-v1.0",
        "model_sha": "6c8dac3a43480d8231306dc1ed7ca5f6a2b9b90f",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}