{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3046075085324232,
            "acc_stderr": 0.013449522109932487,
            "acc_norm": 0.3438566552901024,
            "acc_norm_stderr": 0.013880644570156222
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.36875124477195775,
            "acc_stderr": 0.0048148030984368154,
            "acc_norm": 0.4697271459868552,
            "acc_norm_stderr": 0.00498062728714758
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.52046783625731,
            "acc_stderr": 0.0383161053282193,
            "acc_norm": 0.52046783625731,
            "acc_norm_stderr": 0.0383161053282193
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5436893203883495,
            "acc_stderr": 0.049318019942204146,
            "acc_norm": 0.5436893203883495,
            "acc_norm_stderr": 0.049318019942204146
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4648786717752235,
            "acc_stderr": 0.01783579880629064,
            "acc_norm": 0.4648786717752235,
            "acc_norm_stderr": 0.01783579880629064
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.362962962962963,
            "acc_stderr": 0.041539484047424,
            "acc_norm": 0.362962962962963,
            "acc_norm_stderr": 0.041539484047424
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.42127659574468085,
            "acc_stderr": 0.03227834510146267,
            "acc_norm": 0.42127659574468085,
            "acc_norm_stderr": 0.03227834510146267
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.40963855421686746,
            "acc_stderr": 0.03828401115079021,
            "acc_norm": 0.40963855421686746,
            "acc_norm_stderr": 0.03828401115079021
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4340836012861736,
            "acc_stderr": 0.0281502322445356,
            "acc_norm": 0.4340836012861736,
            "acc_norm_stderr": 0.0281502322445356
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.43946188340807174,
            "acc_stderr": 0.03331092511038179,
            "acc_norm": 0.43946188340807174,
            "acc_norm_stderr": 0.03331092511038179
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4580152671755725,
            "acc_stderr": 0.04369802690578757,
            "acc_norm": 0.4580152671755725,
            "acc_norm_stderr": 0.04369802690578757
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5151515151515151,
            "acc_stderr": 0.0356071651653106,
            "acc_norm": 0.5151515151515151,
            "acc_norm_stderr": 0.0356071651653106
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5103448275862069,
            "acc_stderr": 0.04165774775728763,
            "acc_norm": 0.5103448275862069,
            "acc_norm_stderr": 0.04165774775728763
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.17647058823529413,
            "acc_stderr": 0.03793281185307809,
            "acc_norm": 0.17647058823529413,
            "acc_norm_stderr": 0.03793281185307809
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4831932773109244,
            "acc_stderr": 0.03246013680375308,
            "acc_norm": 0.4831932773109244,
            "acc_norm_stderr": 0.03246013680375308
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4307692307692308,
            "acc_stderr": 0.025106820660539743,
            "acc_norm": 0.4307692307692308,
            "acc_norm_stderr": 0.025106820660539743
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.56,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.48148148148148145,
            "acc_stderr": 0.04830366024635331,
            "acc_norm": 0.48148148148148145,
            "acc_norm_stderr": 0.04830366024635331
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.43842364532019706,
            "acc_stderr": 0.03491207857486518,
            "acc_norm": 0.43842364532019706,
            "acc_norm_stderr": 0.03491207857486518
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.45483870967741935,
            "acc_stderr": 0.028327743091561053,
            "acc_norm": 0.45483870967741935,
            "acc_norm_stderr": 0.028327743091561053
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7094017094017094,
            "acc_stderr": 0.029745048572674078,
            "acc_norm": 0.7094017094017094,
            "acc_norm_stderr": 0.029745048572674078
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4075471698113208,
            "acc_stderr": 0.0302422338008545,
            "acc_norm": 0.4075471698113208,
            "acc_norm_stderr": 0.0302422338008545
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5272727272727272,
            "acc_stderr": 0.04782001791380061,
            "acc_norm": 0.5272727272727272,
            "acc_norm_stderr": 0.04782001791380061
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.34814814814814815,
            "acc_stderr": 0.029045600290616258,
            "acc_norm": 0.34814814814814815,
            "acc_norm_stderr": 0.029045600290616258
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.03710185726119995,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.03710185726119995
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6019900497512438,
            "acc_stderr": 0.034611994290400135,
            "acc_norm": 0.6019900497512438,
            "acc_norm_stderr": 0.034611994290400135
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3815028901734104,
            "acc_stderr": 0.03703851193099521,
            "acc_norm": 0.3815028901734104,
            "acc_norm_stderr": 0.03703851193099521
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.335978835978836,
            "acc_stderr": 0.024326310529149138,
            "acc_norm": 0.335978835978836,
            "acc_norm_stderr": 0.024326310529149138
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2847222222222222,
            "acc_stderr": 0.037738099906869334,
            "acc_norm": 0.2847222222222222,
            "acc_norm_stderr": 0.037738099906869334
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.56,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.49421965317919075,
            "acc_stderr": 0.026917296179149116,
            "acc_norm": 0.49421965317919075,
            "acc_norm_stderr": 0.026917296179149116
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4601226993865031,
            "acc_stderr": 0.03915857291436971,
            "acc_norm": 0.4601226993865031,
            "acc_norm_stderr": 0.03915857291436971
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.43209876543209874,
            "acc_stderr": 0.027563010971606676,
            "acc_norm": 0.43209876543209874,
            "acc_norm_stderr": 0.027563010971606676
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.47150259067357514,
            "acc_stderr": 0.036025735712884414,
            "acc_norm": 0.47150259067357514,
            "acc_norm_stderr": 0.036025735712884414
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.39473684210526316,
            "acc_stderr": 0.04598188057816542,
            "acc_norm": 0.39473684210526316,
            "acc_norm_stderr": 0.04598188057816542
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.45504587155963305,
            "acc_stderr": 0.021350503090925167,
            "acc_norm": 0.45504587155963305,
            "acc_norm_stderr": 0.021350503090925167
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30952380952380953,
            "acc_stderr": 0.041349130183033156,
            "acc_norm": 0.30952380952380953,
            "acc_norm_stderr": 0.041349130183033156
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5,
            "acc_stderr": 0.028629916715693413,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.028629916715693413
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6446280991735537,
            "acc_stderr": 0.0436923632657398,
            "acc_norm": 0.6446280991735537,
            "acc_norm_stderr": 0.0436923632657398
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.34868421052631576,
            "acc_stderr": 0.038781398887976104,
            "acc_norm": 0.34868421052631576,
            "acc_norm_stderr": 0.038781398887976104
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3790849673202614,
            "acc_stderr": 0.01962744474841223,
            "acc_norm": 0.3790849673202614,
            "acc_norm_stderr": 0.01962744474841223
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3617021276595745,
            "acc_stderr": 0.028663820147199492,
            "acc_norm": 0.3617021276595745,
            "acc_norm_stderr": 0.028663820147199492
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.38392857142857145,
            "acc_stderr": 0.04616143075028547,
            "acc_norm": 0.38392857142857145,
            "acc_norm_stderr": 0.04616143075028547
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.37962962962962965,
            "acc_stderr": 0.03309682581119035,
            "acc_norm": 0.37962962962962965,
            "acc_norm_stderr": 0.03309682581119035
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.014333522059217892,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.014333522059217892
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.62,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.62,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3382352941176471,
            "acc_stderr": 0.028739328513983583,
            "acc_norm": 0.3382352941176471,
            "acc_norm_stderr": 0.028739328513983583
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.46938775510204084,
            "acc_stderr": 0.031949171367580624,
            "acc_norm": 0.46938775510204084,
            "acc_norm_stderr": 0.031949171367580624
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5063291139240507,
            "acc_stderr": 0.0325446201076786,
            "acc_norm": 0.5063291139240507,
            "acc_norm_stderr": 0.0325446201076786
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2985658409387223,
            "acc_stderr": 0.011688060141794231,
            "acc_norm": 0.2985658409387223,
            "acc_norm_stderr": 0.011688060141794231
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4166666666666667,
            "acc_stderr": 0.03460228327239171,
            "acc_norm": 0.4166666666666667,
            "acc_norm_stderr": 0.03460228327239171
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.45454545454545453,
            "acc_stderr": 0.038881769216741,
            "acc_norm": 0.45454545454545453,
            "acc_norm_stderr": 0.038881769216741
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3108935128518972,
            "mc1_stderr": 0.016203316673559693,
            "mc2": 0.48747691141114763,
            "mc2_stderr": 0.015615664106933899
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4935064935064935,
            "acc_stderr": 0.017188904359077318,
            "acc_norm": 0.5301062573789846,
            "acc_norm_stderr": 0.017159163590170216
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "mncai/Mistral-7B-v0.1-alpaca-1k",
        "model_sha": "97a2cb89d4f19712842c4e29c44e1b7821905fac",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}