{
  "results": {
    "harness|ko_arc_challenge|25": {
      "acc": 0.31313993174061433,
      "acc_stderr": 0.013552671543623494,
      "acc_norm": 0.3575085324232082,
      "acc_norm_stderr": 0.014005494275916573
    },
    "harness|ko_hellaswag|10": {
      "acc": 0.37064329814777935,
      "acc_stderr": 0.004819899945342492,
      "acc_norm": 0.4643497311292571,
      "acc_norm_stderr": 0.004977081808179427
    },
    "harness|ko_mmlu_world_religions|5": {
      "acc": 0.39766081871345027,
      "acc_stderr": 0.0375363895576169,
      "acc_norm": 0.39766081871345027,
      "acc_norm_stderr": 0.0375363895576169
    },
    "harness|ko_mmlu_management|5": {
      "acc": 0.34951456310679613,
      "acc_stderr": 0.04721188506097173,
      "acc_norm": 0.34951456310679613,
      "acc_norm_stderr": 0.04721188506097173
    },
    "harness|ko_mmlu_miscellaneous|5": {
      "acc": 0.41507024265644954,
      "acc_stderr": 0.01762013700365527,
      "acc_norm": 0.41507024265644954,
      "acc_norm_stderr": 0.01762013700365527
    },
    "harness|ko_mmlu_anatomy|5": {
      "acc": 0.31851851851851853,
      "acc_stderr": 0.040247784019771096,
      "acc_norm": 0.31851851851851853,
      "acc_norm_stderr": 0.040247784019771096
    },
    "harness|ko_mmlu_abstract_algebra|5": {
      "acc": 0.24,
      "acc_stderr": 0.04292346959909283,
      "acc_norm": 0.24,
      "acc_norm_stderr": 0.04292346959909283
    },
    "harness|ko_mmlu_conceptual_physics|5": {
      "acc": 0.33191489361702126,
      "acc_stderr": 0.030783736757745643,
      "acc_norm": 0.33191489361702126,
      "acc_norm_stderr": 0.030783736757745643
    },
    "harness|ko_mmlu_virology|5": {
      "acc": 0.3072289156626506,
      "acc_stderr": 0.03591566797824664,
      "acc_norm": 0.3072289156626506,
      "acc_norm_stderr": 0.03591566797824664
    },
    "harness|ko_mmlu_philosophy|5": {
      "acc": 0.36012861736334406,
      "acc_stderr": 0.027264297599804012,
      "acc_norm": 0.36012861736334406,
      "acc_norm_stderr": 0.027264297599804012
    },
    "harness|ko_mmlu_human_aging|5": {
      "acc": 0.4125560538116592,
      "acc_stderr": 0.03304062175449296,
      "acc_norm": 0.4125560538116592,
      "acc_norm_stderr": 0.03304062175449296
    },
    "harness|ko_mmlu_human_sexuality|5": {
      "acc": 0.3511450381679389,
      "acc_stderr": 0.04186445163013751,
      "acc_norm": 0.3511450381679389,
      "acc_norm_stderr": 0.04186445163013751
    },
    "harness|ko_mmlu_medical_genetics|5": {
      "acc": 0.26,
      "acc_stderr": 0.04408440022768079,
      "acc_norm": 0.26,
      "acc_norm_stderr": 0.04408440022768079
    },
    "harness|ko_mmlu_high_school_geography|5": {
      "acc": 0.3434343434343434,
      "acc_stderr": 0.033832012232444426,
      "acc_norm": 0.3434343434343434,
      "acc_norm_stderr": 0.033832012232444426
    },
    "harness|ko_mmlu_electrical_engineering|5": {
      "acc": 0.31724137931034485,
      "acc_stderr": 0.03878352372138622,
      "acc_norm": 0.31724137931034485,
      "acc_norm_stderr": 0.03878352372138622
    },
    "harness|ko_mmlu_college_physics|5": {
      "acc": 0.24509803921568626,
      "acc_stderr": 0.04280105837364396,
      "acc_norm": 0.24509803921568626,
      "acc_norm_stderr": 0.04280105837364396
    },
    "harness|ko_mmlu_high_school_microeconomics|5": {
      "acc": 0.3445378151260504,
      "acc_stderr": 0.030868682604121633,
      "acc_norm": 0.3445378151260504,
      "acc_norm_stderr": 0.030868682604121633
    },
    "harness|ko_mmlu_high_school_macroeconomics|5": {
      "acc": 0.3128205128205128,
      "acc_stderr": 0.02350757902064535,
      "acc_norm": 0.3128205128205128,
      "acc_norm_stderr": 0.02350757902064535
    },
    "harness|ko_mmlu_computer_security|5": {
      "acc": 0.47,
      "acc_stderr": 0.050161355804659205,
      "acc_norm": 0.47,
      "acc_norm_stderr": 0.050161355804659205
    },
    "harness|ko_mmlu_global_facts|5": {
      "acc": 0.31,
      "acc_stderr": 0.04648231987117316,
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "harness|ko_mmlu_jurisprudence|5": {
      "acc": 0.4074074074074074,
      "acc_stderr": 0.047500773411999854,
      "acc_norm": 0.4074074074074074,
      "acc_norm_stderr": 0.047500773411999854
    },
    "harness|ko_mmlu_high_school_chemistry|5": {
      "acc": 0.35467980295566504,
      "acc_stderr": 0.033661244890514495,
      "acc_norm": 0.35467980295566504,
      "acc_norm_stderr": 0.033661244890514495
    },
    "harness|ko_mmlu_high_school_biology|5": {
      "acc": 0.3,
      "acc_stderr": 0.02606936229533513,
      "acc_norm": 0.3,
      "acc_norm_stderr": 0.02606936229533513
    },
    "harness|ko_mmlu_marketing|5": {
      "acc": 0.6196581196581197,
      "acc_stderr": 0.031804252043840985,
      "acc_norm": 0.6196581196581197,
      "acc_norm_stderr": 0.031804252043840985
    },
    "harness|ko_mmlu_clinical_knowledge|5": {
      "acc": 0.30566037735849055,
      "acc_stderr": 0.028353298073322663,
      "acc_norm": 0.30566037735849055,
      "acc_norm_stderr": 0.028353298073322663
    },
    "harness|ko_mmlu_public_relations|5": {
      "acc": 0.37272727272727274,
      "acc_stderr": 0.04631381319425464,
      "acc_norm": 0.37272727272727274,
      "acc_norm_stderr": 0.04631381319425464
    },
    "harness|ko_mmlu_high_school_mathematics|5": {
      "acc": 0.2851851851851852,
      "acc_stderr": 0.027528599210340496,
      "acc_norm": 0.2851851851851852,
      "acc_norm_stderr": 0.027528599210340496
    },
    "harness|ko_mmlu_high_school_physics|5": {
      "acc": 0.32450331125827814,
      "acc_stderr": 0.038227469376587525,
      "acc_norm": 0.32450331125827814,
      "acc_norm_stderr": 0.038227469376587525
    },
    "harness|ko_mmlu_sociology|5": {
      "acc": 0.373134328358209,
      "acc_stderr": 0.034198326081760065,
      "acc_norm": 0.373134328358209,
      "acc_norm_stderr": 0.034198326081760065
    },
    "harness|ko_mmlu_college_medicine|5": {
      "acc": 0.2774566473988439,
      "acc_stderr": 0.03414014007044036,
      "acc_norm": 0.2774566473988439,
      "acc_norm_stderr": 0.03414014007044036
    },
    "harness|ko_mmlu_elementary_mathematics|5": {
      "acc": 0.3148148148148148,
      "acc_stderr": 0.02391998416404774,
      "acc_norm": 0.3148148148148148,
      "acc_norm_stderr": 0.02391998416404774
    },
    "harness|ko_mmlu_college_biology|5": {
      "acc": 0.2986111111111111,
      "acc_stderr": 0.03827052357950756,
      "acc_norm": 0.2986111111111111,
      "acc_norm_stderr": 0.03827052357950756
    },
    "harness|ko_mmlu_college_chemistry|5": {
      "acc": 0.23,
      "acc_stderr": 0.042295258468165065,
      "acc_norm": 0.23,
      "acc_norm_stderr": 0.042295258468165065
    },
    "harness|ko_mmlu_us_foreign_policy|5": {
      "acc": 0.46,
      "acc_stderr": 0.05009082659620332,
      "acc_norm": 0.46,
      "acc_norm_stderr": 0.05009082659620332
    },
    "harness|ko_mmlu_moral_disputes|5": {
      "acc": 0.36416184971098264,
      "acc_stderr": 0.025906632631016127,
      "acc_norm": 0.36416184971098264,
      "acc_norm_stderr": 0.025906632631016127
    },
    "harness|ko_mmlu_logical_fallacies|5": {
      "acc": 0.3312883435582822,
      "acc_stderr": 0.03697983910025588,
      "acc_norm": 0.3312883435582822,
      "acc_norm_stderr": 0.03697983910025588
    },
    "harness|ko_mmlu_prehistory|5": {
      "acc": 0.3765432098765432,
      "acc_stderr": 0.02695934451874778,
      "acc_norm": 0.3765432098765432,
      "acc_norm_stderr": 0.02695934451874778
    },
    "harness|ko_mmlu_college_mathematics|5": {
      "acc": 0.27,
      "acc_stderr": 0.0446196043338474,
      "acc_norm": 0.27,
      "acc_norm_stderr": 0.0446196043338474
    },
    "harness|ko_mmlu_high_school_government_and_politics|5": {
      "acc": 0.3316062176165803,
      "acc_stderr": 0.03397636541089116,
      "acc_norm": 0.3316062176165803,
      "acc_norm_stderr": 0.03397636541089116
    },
    "harness|ko_mmlu_econometrics|5": {
      "acc": 0.2982456140350877,
      "acc_stderr": 0.04303684033537315,
      "acc_norm": 0.2982456140350877,
      "acc_norm_stderr": 0.04303684033537315
    },
    "harness|ko_mmlu_high_school_psychology|5": {
      "acc": 0.3321100917431193,
      "acc_stderr": 0.020192682985423337,
      "acc_norm": 0.3321100917431193,
      "acc_norm_stderr": 0.020192682985423337
    },
    "harness|ko_mmlu_formal_logic|5": {
      "acc": 0.18253968253968253,
      "acc_stderr": 0.034550710191021475,
      "acc_norm": 0.18253968253968253,
      "acc_norm_stderr": 0.034550710191021475
    },
    "harness|ko_mmlu_nutrition|5": {
      "acc": 0.33986928104575165,
      "acc_stderr": 0.027121956071388852,
      "acc_norm": 0.33986928104575165,
      "acc_norm_stderr": 0.027121956071388852
    },
    "harness|ko_mmlu_business_ethics|5": {
      "acc": 0.36,
      "acc_stderr": 0.04824181513244218,
      "acc_norm": 0.36,
      "acc_norm_stderr": 0.04824181513244218
    },
    "harness|ko_mmlu_international_law|5": {
      "acc": 0.5702479338842975,
      "acc_stderr": 0.04519082021319771,
      "acc_norm": 0.5702479338842975,
      "acc_norm_stderr": 0.04519082021319771
    },
    "harness|ko_mmlu_astronomy|5": {
      "acc": 0.28289473684210525,
      "acc_stderr": 0.03665349695640767,
      "acc_norm": 0.28289473684210525,
      "acc_norm_stderr": 0.03665349695640767
    },
    "harness|ko_mmlu_professional_psychology|5": {
      "acc": 0.3088235294117647,
      "acc_stderr": 0.01869085027359529,
      "acc_norm": 0.3088235294117647,
      "acc_norm_stderr": 0.01869085027359529
    },
    "harness|ko_mmlu_professional_accounting|5": {
      "acc": 0.29432624113475175,
      "acc_stderr": 0.02718712701150381,
      "acc_norm": 0.29432624113475175,
      "acc_norm_stderr": 0.02718712701150381
    },
    "harness|ko_mmlu_machine_learning|5": {
      "acc": 0.35714285714285715,
      "acc_stderr": 0.04547960999764376,
      "acc_norm": 0.35714285714285715,
      "acc_norm_stderr": 0.04547960999764376
    },
    "harness|ko_mmlu_high_school_statistics|5": {
      "acc": 0.25,
      "acc_stderr": 0.029531221160930918,
      "acc_norm": 0.25,
      "acc_norm_stderr": 0.029531221160930918
    },
    "harness|ko_mmlu_moral_scenarios|5": {
      "acc": 0.26145251396648045,
      "acc_stderr": 0.014696599650364548,
      "acc_norm": 0.26145251396648045,
      "acc_norm_stderr": 0.014696599650364548
    },
    "harness|ko_mmlu_college_computer_science|5": {
      "acc": 0.26,
      "acc_stderr": 0.0440844002276808,
      "acc_norm": 0.26,
      "acc_norm_stderr": 0.0440844002276808
    },
    "harness|ko_mmlu_high_school_computer_science|5": {
      "acc": 0.45,
      "acc_stderr": 0.05,
      "acc_norm": 0.45,
      "acc_norm_stderr": 0.05
    },
    "harness|ko_mmlu_professional_medicine|5": {
      "acc": 0.2647058823529412,
      "acc_stderr": 0.026799562024887674,
      "acc_norm": 0.2647058823529412,
      "acc_norm_stderr": 0.026799562024887674
    },
    "harness|ko_mmlu_security_studies|5": {
      "acc": 0.27346938775510204,
      "acc_stderr": 0.028535560337128438,
      "acc_norm": 0.27346938775510204,
      "acc_norm_stderr": 0.028535560337128438
    },
    "harness|ko_mmlu_high_school_world_history|5": {
      "acc": 0.34177215189873417,
      "acc_stderr": 0.030874537537553617,
      "acc_norm": 0.34177215189873417,
      "acc_norm_stderr": 0.030874537537553617
    },
    "harness|ko_mmlu_professional_law|5": {
      "acc": 0.26792698826597133,
      "acc_stderr": 0.011311347690633885,
      "acc_norm": 0.26792698826597133,
      "acc_norm_stderr": 0.011311347690633885
    },
    "harness|ko_mmlu_high_school_us_history|5": {
      "acc": 0.28921568627450983,
      "acc_stderr": 0.03182231867647553,
      "acc_norm": 0.28921568627450983,
      "acc_norm_stderr": 0.03182231867647553
    },
    "harness|ko_mmlu_high_school_european_history|5": {
      "acc": 0.32727272727272727,
      "acc_stderr": 0.03663974994391244,
      "acc_norm": 0.32727272727272727,
      "acc_norm_stderr": 0.03663974994391244
    },
    "harness|ko_truthfulqa_mc|0": {
      "mc1": 0.2717258261933905,
      "mc1_stderr": 0.015572840452875835,
      "mc2": 0.4450037389871468,
      "mc2_stderr": 0.01574377596952645
    },
    "harness|ko_commongen_v2|2": {
      "acc": 0.2632821723730815,
      "acc_stderr": 0.015141752199573208,
      "acc_norm": 0.3624557260920897,
      "acc_norm_stderr": 0.016527131240453716
    }
  },
  "versions": {
    "all": 0,
    "harness|ko_arc_challenge|25": 0,
    "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1,
    "harness|ko_mmlu_management|5": 1,
    "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1,
    "harness|ko_mmlu_abstract_algebra|5": 1,
    "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1,
    "harness|ko_mmlu_philosophy|5": 1,
    "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1,
    "harness|ko_mmlu_medical_genetics|5": 1,
    "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1,
    "harness|ko_mmlu_college_physics|5": 1,
    "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
    "harness|ko_mmlu_computer_security|5": 1,
    "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1,
    "harness|ko_mmlu_high_school_chemistry|5": 1,
    "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1,
    "harness|ko_mmlu_clinical_knowledge|5": 1,
    "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1,
    "harness|ko_mmlu_high_school_physics|5": 1,
    "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1,
    "harness|ko_mmlu_elementary_mathematics|5": 1,
    "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1,
    "harness|ko_mmlu_us_foreign_policy|5": 1,
    "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1,
    "harness|ko_mmlu_prehistory|5": 1,
    "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
    "harness|ko_mmlu_econometrics|5": 1,
    "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1,
    "harness|ko_mmlu_nutrition|5": 1,
    "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1,
    "harness|ko_mmlu_astronomy|5": 1,
    "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1,
    "harness|ko_mmlu_machine_learning|5": 1,
    "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1,
    "harness|ko_mmlu_college_computer_science|5": 1,
    "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1,
    "harness|ko_mmlu_security_studies|5": 1,
    "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1,
    "harness|ko_mmlu_high_school_us_history|5": 1,
    "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0,
    "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "mncai/Mistral-7B-v0.1-orca-1k",
    "model_sha": "3bfedee0d952da852fefa84e70f6373174a1deaf",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}