|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2773037542662116,
            "acc_stderr": 0.013082095839059374,
            "acc_norm": 0.32764505119453924,
            "acc_norm_stderr": 0.013715847940719346
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.34863572993427605,
            "acc_stderr": 0.00475564501626385,
            "acc_norm": 0.4313881696873133,
            "acc_norm_stderr": 0.004942578520987342
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.2982456140350877,
            "acc_stderr": 0.03508771929824565,
            "acc_norm": 0.2982456140350877,
            "acc_norm_stderr": 0.03508771929824565
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.17475728155339806,
            "acc_stderr": 0.03760178006026621,
            "acc_norm": 0.17475728155339806,
            "acc_norm_stderr": 0.03760178006026621
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.26309067688378035,
            "acc_stderr": 0.01574549716904906,
            "acc_norm": 0.26309067688378035,
            "acc_norm_stderr": 0.01574549716904906
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.34074074074074073,
            "acc_stderr": 0.040943762699967946,
            "acc_norm": 0.34074074074074073,
            "acc_norm_stderr": 0.040943762699967946
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2851063829787234,
            "acc_stderr": 0.029513196625539355,
            "acc_norm": 0.2851063829787234,
            "acc_norm_stderr": 0.029513196625539355
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3132530120481928,
            "acc_stderr": 0.036108050180310235,
            "acc_norm": 0.3132530120481928,
            "acc_norm_stderr": 0.036108050180310235
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3022508038585209,
            "acc_stderr": 0.02608270069539966,
            "acc_norm": 0.3022508038585209,
            "acc_norm_stderr": 0.02608270069539966
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.21973094170403587,
            "acc_stderr": 0.0277901770643836,
            "acc_norm": 0.21973094170403587,
            "acc_norm_stderr": 0.0277901770643836
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2748091603053435,
            "acc_stderr": 0.03915345408847835,
            "acc_norm": 0.2748091603053435,
            "acc_norm_stderr": 0.03915345408847835
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384739,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384739
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.2474747474747475,
            "acc_stderr": 0.030746300742124505,
            "acc_norm": 0.2474747474747475,
            "acc_norm_stderr": 0.030746300742124505
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2827586206896552,
            "acc_stderr": 0.03752833958003336,
            "acc_norm": 0.2827586206896552,
            "acc_norm_stderr": 0.03752833958003336
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.19607843137254902,
            "acc_stderr": 0.03950581861179962,
            "acc_norm": 0.19607843137254902,
            "acc_norm_stderr": 0.03950581861179962
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.2605042016806723,
            "acc_stderr": 0.02851025151234191,
            "acc_norm": 0.2605042016806723,
            "acc_norm_stderr": 0.02851025151234191
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2,
            "acc_stderr": 0.020280805062535722,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.020280805062535722
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.32407407407407407,
            "acc_stderr": 0.04524596007030048,
            "acc_norm": 0.32407407407407407,
            "acc_norm_stderr": 0.04524596007030048
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2019704433497537,
            "acc_stderr": 0.028247350122180267,
            "acc_norm": 0.2019704433497537,
            "acc_norm_stderr": 0.028247350122180267
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.267741935483871,
            "acc_stderr": 0.02518900666021238,
            "acc_norm": 0.267741935483871,
            "acc_norm_stderr": 0.02518900666021238
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.27350427350427353,
            "acc_stderr": 0.029202540153431183,
            "acc_norm": 0.27350427350427353,
            "acc_norm_stderr": 0.029202540153431183
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.24528301886792453,
            "acc_stderr": 0.026480357179895712,
            "acc_norm": 0.24528301886792453,
            "acc_norm_stderr": 0.026480357179895712
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.24545454545454545,
            "acc_stderr": 0.04122066502878285,
            "acc_norm": 0.24545454545454545,
            "acc_norm_stderr": 0.04122066502878285
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.026719240783712156,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.026719240783712156
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389024,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389024
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.263681592039801,
            "acc_stderr": 0.031157150869355575,
            "acc_norm": 0.263681592039801,
            "acc_norm_stderr": 0.031157150869355575
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.26011560693641617,
            "acc_stderr": 0.03345036916788991,
            "acc_norm": 0.26011560693641617,
            "acc_norm_stderr": 0.03345036916788991
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.022644212615525214,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.022644212615525214
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2708333333333333,
            "acc_stderr": 0.03716177437566017,
            "acc_norm": 0.2708333333333333,
            "acc_norm_stderr": 0.03716177437566017
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.2,
            "acc_stderr": 0.040201512610368445,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.040201512610368445
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.2658959537572254,
            "acc_stderr": 0.023786203255508283,
            "acc_norm": 0.2658959537572254,
            "acc_norm_stderr": 0.023786203255508283
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.2822085889570552,
            "acc_stderr": 0.03536117886664743,
            "acc_norm": 0.2822085889570552,
            "acc_norm_stderr": 0.03536117886664743
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2716049382716049,
            "acc_stderr": 0.02474862449053737,
            "acc_norm": 0.2716049382716049,
            "acc_norm_stderr": 0.02474862449053737
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768078,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768078
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.24870466321243523,
            "acc_stderr": 0.03119584087770029,
            "acc_norm": 0.24870466321243523,
            "acc_norm_stderr": 0.03119584087770029
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.040969851398436716,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.040969851398436716
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.23853211009174313,
            "acc_stderr": 0.018272575810231867,
            "acc_norm": 0.23853211009174313,
            "acc_norm_stderr": 0.018272575810231867
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.1349206349206349,
            "acc_stderr": 0.030557101589417515,
            "acc_norm": 0.1349206349206349,
            "acc_norm_stderr": 0.030557101589417515
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.238562091503268,
            "acc_stderr": 0.024404394928087866,
            "acc_norm": 0.238562091503268,
            "acc_norm_stderr": 0.024404394928087866
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036624,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036624
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.371900826446281,
            "acc_stderr": 0.04412015806624503,
            "acc_norm": 0.371900826446281,
            "acc_norm_stderr": 0.04412015806624503
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.26973684210526316,
            "acc_stderr": 0.03611780560284898,
            "acc_norm": 0.26973684210526316,
            "acc_norm_stderr": 0.03611780560284898
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.28104575163398693,
            "acc_stderr": 0.01818521895431809,
            "acc_norm": 0.28104575163398693,
            "acc_norm_stderr": 0.01818521895431809
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2375886524822695,
            "acc_stderr": 0.025389512552729903,
            "acc_norm": 0.2375886524822695,
            "acc_norm_stderr": 0.025389512552729903
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2767857142857143,
            "acc_stderr": 0.04246624336697623,
            "acc_norm": 0.2767857142857143,
            "acc_norm_stderr": 0.04246624336697623
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.22685185185185186,
            "acc_stderr": 0.028561650102422273,
            "acc_norm": 0.22685185185185186,
            "acc_norm_stderr": 0.028561650102422273
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2446927374301676,
            "acc_stderr": 0.014378169884098426,
            "acc_norm": 0.2446927374301676,
            "acc_norm_stderr": 0.014378169884098426
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.1875,
            "acc_stderr": 0.023709788253811766,
            "acc_norm": 0.1875,
            "acc_norm_stderr": 0.023709788253811766
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.24489795918367346,
            "acc_stderr": 0.027529637440174934,
            "acc_norm": 0.24489795918367346,
            "acc_norm_stderr": 0.027529637440174934
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.270042194092827,
            "acc_stderr": 0.028900721906293426,
            "acc_norm": 0.270042194092827,
            "acc_norm_stderr": 0.028900721906293426
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.24315514993481094,
            "acc_stderr": 0.010956556654417353,
            "acc_norm": 0.24315514993481094,
            "acc_norm_stderr": 0.010956556654417353
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.030587591351604246,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.030587591351604246
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.32727272727272727,
            "acc_stderr": 0.03663974994391242,
            "acc_norm": 0.32727272727272727,
            "acc_norm_stderr": 0.03663974994391242
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2423500611995104,
            "mc1_stderr": 0.015000674373570342,
            "mc2": 0.415216441138711,
            "mc2_stderr": 0.015096025074072256
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.33215962441314556,
            "acc_stderr": 0.016145257507387774,
            "acc_norm": 0.40492957746478875,
            "acc_norm_stderr": 0.01682709522397798
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "amphora/small-instruct",
        "model_sha": "f7187ec82340f592a33ec4b22d02cfbc935886de",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}