{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3122866894197952,
            "acc_stderr": 0.013542598541688065,
            "acc_norm": 0.35238907849829354,
            "acc_norm_stderr": 0.013960142600598673
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3403704441346345,
            "acc_stderr": 0.004728653488866913,
            "acc_norm": 0.4166500697072296,
            "acc_norm_stderr": 0.004919962822208309
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.45614035087719296,
            "acc_stderr": 0.03820042586602966,
            "acc_norm": 0.45614035087719296,
            "acc_norm_stderr": 0.03820042586602966
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5242718446601942,
            "acc_stderr": 0.04944901092973779,
            "acc_norm": 0.5242718446601942,
            "acc_norm_stderr": 0.04944901092973779
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.47509578544061304,
            "acc_stderr": 0.01785777070490102,
            "acc_norm": 0.47509578544061304,
            "acc_norm_stderr": 0.01785777070490102
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2814814814814815,
            "acc_stderr": 0.03885004245800254,
            "acc_norm": 0.2814814814814815,
            "acc_norm_stderr": 0.03885004245800254
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4297872340425532,
            "acc_stderr": 0.03236214467715564,
            "acc_norm": 0.4297872340425532,
            "acc_norm_stderr": 0.03236214467715564
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4457831325301205,
            "acc_stderr": 0.03869543323472101,
            "acc_norm": 0.4457831325301205,
            "acc_norm_stderr": 0.03869543323472101
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4630225080385852,
            "acc_stderr": 0.02832032583010592,
            "acc_norm": 0.4630225080385852,
            "acc_norm_stderr": 0.02832032583010592
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.49327354260089684,
            "acc_stderr": 0.03355476596234354,
            "acc_norm": 0.49327354260089684,
            "acc_norm_stderr": 0.03355476596234354
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4961832061068702,
            "acc_stderr": 0.04385162325601553,
            "acc_norm": 0.4961832061068702,
            "acc_norm_stderr": 0.04385162325601553
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145631,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145631
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.494949494949495,
            "acc_stderr": 0.035621707606254015,
            "acc_norm": 0.494949494949495,
            "acc_norm_stderr": 0.035621707606254015
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5241379310344828,
            "acc_stderr": 0.0416180850350153,
            "acc_norm": 0.5241379310344828,
            "acc_norm_stderr": 0.0416180850350153
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.29411764705882354,
            "acc_stderr": 0.04533838195929776,
            "acc_norm": 0.29411764705882354,
            "acc_norm_stderr": 0.04533838195929776
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4789915966386555,
            "acc_stderr": 0.03244980849990029,
            "acc_norm": 0.4789915966386555,
            "acc_norm_stderr": 0.03244980849990029
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.46153846153846156,
            "acc_stderr": 0.025275892070240634,
            "acc_norm": 0.46153846153846156,
            "acc_norm_stderr": 0.025275892070240634
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.59,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5462962962962963,
            "acc_stderr": 0.04812917324536823,
            "acc_norm": 0.5462962962962963,
            "acc_norm_stderr": 0.04812917324536823
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3793103448275862,
            "acc_stderr": 0.03413963805906235,
            "acc_norm": 0.3793103448275862,
            "acc_norm_stderr": 0.03413963805906235
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.45483870967741935,
            "acc_stderr": 0.02832774309156106,
            "acc_norm": 0.45483870967741935,
            "acc_norm_stderr": 0.02832774309156106
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6538461538461539,
            "acc_stderr": 0.0311669573672359,
            "acc_norm": 0.6538461538461539,
            "acc_norm_stderr": 0.0311669573672359
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4226415094339623,
            "acc_stderr": 0.03040233144576954,
            "acc_norm": 0.4226415094339623,
            "acc_norm_stderr": 0.03040233144576954
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.42727272727272725,
            "acc_stderr": 0.04738198703545483,
            "acc_norm": 0.42727272727272725,
            "acc_norm_stderr": 0.04738198703545483
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.4,
            "acc_stderr": 0.02986960509531691,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.02986960509531691
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.037345356767871984,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.037345356767871984
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5522388059701493,
            "acc_stderr": 0.03516184772952167,
            "acc_norm": 0.5522388059701493,
            "acc_norm_stderr": 0.03516184772952167
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.42196531791907516,
            "acc_stderr": 0.037657466938651504,
            "acc_norm": 0.42196531791907516,
            "acc_norm_stderr": 0.037657466938651504
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.544973544973545,
            "acc_stderr": 0.025646928361049398,
            "acc_norm": 0.544973544973545,
            "acc_norm_stderr": 0.025646928361049398
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2708333333333333,
            "acc_stderr": 0.03716177437566017,
            "acc_norm": 0.2708333333333333,
            "acc_norm_stderr": 0.03716177437566017
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939098,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939098
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.65,
            "acc_stderr": 0.04793724854411019,
            "acc_norm": 0.65,
            "acc_norm_stderr": 0.04793724854411019
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4797687861271676,
            "acc_stderr": 0.026897049996382875,
            "acc_norm": 0.4797687861271676,
            "acc_norm_stderr": 0.026897049996382875
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.44785276073619634,
            "acc_stderr": 0.03906947479456601,
            "acc_norm": 0.44785276073619634,
            "acc_norm_stderr": 0.03906947479456601
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.49382716049382713,
            "acc_stderr": 0.027818623962583295,
            "acc_norm": 0.49382716049382713,
            "acc_norm_stderr": 0.027818623962583295
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.45,
            "acc_stderr": 0.049999999999999996,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.049999999999999996
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.47150259067357514,
            "acc_stderr": 0.03602573571288441,
            "acc_norm": 0.47150259067357514,
            "acc_norm_stderr": 0.03602573571288441
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.38596491228070173,
            "acc_stderr": 0.045796394220704355,
            "acc_norm": 0.38596491228070173,
            "acc_norm_stderr": 0.045796394220704355
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.46605504587155966,
            "acc_stderr": 0.02138786335035399,
            "acc_norm": 0.46605504587155966,
            "acc_norm_stderr": 0.02138786335035399
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.4603174603174603,
            "acc_stderr": 0.04458029125470973,
            "acc_norm": 0.4603174603174603,
            "acc_norm_stderr": 0.04458029125470973
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.45098039215686275,
            "acc_stderr": 0.02849199358617157,
            "acc_norm": 0.45098039215686275,
            "acc_norm_stderr": 0.02849199358617157
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.53,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.628099173553719,
            "acc_stderr": 0.04412015806624504,
            "acc_norm": 0.628099173553719,
            "acc_norm_stderr": 0.04412015806624504
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.375,
            "acc_stderr": 0.039397364351956274,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.039397364351956274
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.39869281045751637,
            "acc_stderr": 0.019808281317449848,
            "acc_norm": 0.39869281045751637,
            "acc_norm_stderr": 0.019808281317449848
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.39361702127659576,
            "acc_stderr": 0.02914454478159615,
            "acc_norm": 0.39361702127659576,
            "acc_norm_stderr": 0.02914454478159615
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4375,
            "acc_stderr": 0.04708567521880525,
            "acc_norm": 0.4375,
            "acc_norm_stderr": 0.04708567521880525
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4537037037037037,
            "acc_stderr": 0.03395322726375797,
            "acc_norm": 0.4537037037037037,
            "acc_norm_stderr": 0.03395322726375797
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.28938547486033517,
            "acc_stderr": 0.015166544550490317,
            "acc_norm": 0.28938547486033517,
            "acc_norm_stderr": 0.015166544550490317
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.78,
            "acc_stderr": 0.04163331998932261,
            "acc_norm": 0.78,
            "acc_norm_stderr": 0.04163331998932261
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.33088235294117646,
            "acc_stderr": 0.02858270975389844,
            "acc_norm": 0.33088235294117646,
            "acc_norm_stderr": 0.02858270975389844
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5469387755102041,
            "acc_stderr": 0.03186785930004128,
            "acc_norm": 0.5469387755102041,
            "acc_norm_stderr": 0.03186785930004128
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5274261603375527,
            "acc_stderr": 0.03249822718301304,
            "acc_norm": 0.5274261603375527,
            "acc_norm_stderr": 0.03249822718301304
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3428943937418514,
            "acc_stderr": 0.012123463271585892,
            "acc_norm": 0.3428943937418514,
            "acc_norm_stderr": 0.012123463271585892
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.45098039215686275,
            "acc_stderr": 0.03492406104163614,
            "acc_norm": 0.45098039215686275,
            "acc_norm_stderr": 0.03492406104163614
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.47878787878787876,
            "acc_stderr": 0.03900828913737302,
            "acc_norm": 0.47878787878787876,
            "acc_norm_stderr": 0.03900828913737302
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3219094247246022,
            "mc1_stderr": 0.016355567611960397,
            "mc2": 0.5121087237362004,
            "mc2_stderr": 0.01621113484074564
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4923258559622196,
            "acc_stderr": 0.017188329219654276,
            "acc_norm": 0.5171192443919717,
            "acc_norm_stderr": 0.017180275246085626
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "01-ai/Yi-1.5-9B-Chat-16K",
        "model_sha": "2b397e5f0fab87984efa66856c5c4ed4bbe68b50",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}