{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4121160409556314,
            "acc_stderr": 0.0143839153022254,
            "acc_norm": 0.47525597269624575,
            "acc_norm_stderr": 0.014593487694937736
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.2984465245966939,
            "acc_stderr": 0.004566412808642454,
            "acc_norm": 0.34475204142601074,
            "acc_norm_stderr": 0.004743160034271155
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.38011695906432746,
            "acc_stderr": 0.037229657413855394,
            "acc_norm": 0.38011695906432746,
            "acc_norm_stderr": 0.037229657413855394
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5631067961165048,
            "acc_stderr": 0.04911147107365777,
            "acc_norm": 0.5631067961165048,
            "acc_norm_stderr": 0.04911147107365777
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5134099616858238,
            "acc_stderr": 0.017873531736510365,
            "acc_norm": 0.5134099616858238,
            "acc_norm_stderr": 0.017873531736510365
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.42962962962962964,
            "acc_stderr": 0.04276349494376599,
            "acc_norm": 0.42962962962962964,
            "acc_norm_stderr": 0.04276349494376599
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720683,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720683
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.39574468085106385,
            "acc_stderr": 0.03196758697835363,
            "acc_norm": 0.39574468085106385,
            "acc_norm_stderr": 0.03196758697835363
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.42168674698795183,
            "acc_stderr": 0.03844453181770917,
            "acc_norm": 0.42168674698795183,
            "acc_norm_stderr": 0.03844453181770917
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5144694533762058,
            "acc_stderr": 0.028386198084177673,
            "acc_norm": 0.5144694533762058,
            "acc_norm_stderr": 0.028386198084177673
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.47533632286995514,
            "acc_stderr": 0.033516951676526276,
            "acc_norm": 0.47533632286995514,
            "acc_norm_stderr": 0.033516951676526276
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5114503816793893,
            "acc_stderr": 0.04384140024078016,
            "acc_norm": 0.5114503816793893,
            "acc_norm_stderr": 0.04384140024078016
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6111111111111112,
            "acc_stderr": 0.0347327959083696,
            "acc_norm": 0.6111111111111112,
            "acc_norm_stderr": 0.0347327959083696
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.42758620689655175,
            "acc_stderr": 0.04122737111370332,
            "acc_norm": 0.42758620689655175,
            "acc_norm_stderr": 0.04122737111370332
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.28431372549019607,
            "acc_stderr": 0.04488482852329017,
            "acc_norm": 0.28431372549019607,
            "acc_norm_stderr": 0.04488482852329017
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5084033613445378,
            "acc_stderr": 0.0324739027656967,
            "acc_norm": 0.5084033613445378,
            "acc_norm_stderr": 0.0324739027656967
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4794871794871795,
            "acc_stderr": 0.025329663163489943,
            "acc_norm": 0.4794871794871795,
            "acc_norm_stderr": 0.025329663163489943
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.58,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.04605661864718381,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.04605661864718381
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5462962962962963,
            "acc_stderr": 0.04812917324536823,
            "acc_norm": 0.5462962962962963,
            "acc_norm_stderr": 0.04812917324536823
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3793103448275862,
            "acc_stderr": 0.03413963805906235,
            "acc_norm": 0.3793103448275862,
            "acc_norm_stderr": 0.03413963805906235
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.532258064516129,
            "acc_stderr": 0.028384747788813332,
            "acc_norm": 0.532258064516129,
            "acc_norm_stderr": 0.028384747788813332
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7606837606837606,
            "acc_stderr": 0.027951826808924333,
            "acc_norm": 0.7606837606837606,
            "acc_norm_stderr": 0.027951826808924333
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.45660377358490567,
            "acc_stderr": 0.03065674869673943,
            "acc_norm": 0.45660377358490567,
            "acc_norm_stderr": 0.03065674869673943
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5818181818181818,
            "acc_stderr": 0.04724577405731572,
            "acc_norm": 0.5818181818181818,
            "acc_norm_stderr": 0.04724577405731572
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.34074074074074073,
            "acc_stderr": 0.028897748741131143,
            "acc_norm": 0.34074074074074073,
            "acc_norm_stderr": 0.028897748741131143
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.32450331125827814,
            "acc_stderr": 0.038227469376587525,
            "acc_norm": 0.32450331125827814,
            "acc_norm_stderr": 0.038227469376587525
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6268656716417911,
            "acc_stderr": 0.034198326081760065,
            "acc_norm": 0.6268656716417911,
            "acc_norm_stderr": 0.034198326081760065
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3872832369942196,
            "acc_stderr": 0.03714325906302065,
            "acc_norm": 0.3872832369942196,
            "acc_norm_stderr": 0.03714325906302065
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3994708994708995,
            "acc_stderr": 0.025225450284067877,
            "acc_norm": 0.3994708994708995,
            "acc_norm_stderr": 0.025225450284067877
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4236111111111111,
            "acc_stderr": 0.04132125019723369,
            "acc_norm": 0.4236111111111111,
            "acc_norm_stderr": 0.04132125019723369
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.68,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.68,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5317919075144508,
            "acc_stderr": 0.026864624366756656,
            "acc_norm": 0.5317919075144508,
            "acc_norm_stderr": 0.026864624366756656
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.49079754601226994,
            "acc_stderr": 0.039277056007874414,
            "acc_norm": 0.49079754601226994,
            "acc_norm_stderr": 0.039277056007874414
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5092592592592593,
            "acc_stderr": 0.027815973433878014,
            "acc_norm": 0.5092592592592593,
            "acc_norm_stderr": 0.027815973433878014
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.34196891191709844,
            "acc_stderr": 0.03423465100104282,
            "acc_norm": 0.34196891191709844,
            "acc_norm_stderr": 0.03423465100104282
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.34210526315789475,
            "acc_stderr": 0.04462917535336937,
            "acc_norm": 0.34210526315789475,
            "acc_norm_stderr": 0.04462917535336937
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.563302752293578,
            "acc_stderr": 0.02126482015871421,
            "acc_norm": 0.563302752293578,
            "acc_norm_stderr": 0.02126482015871421
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.04285714285714281,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.04285714285714281
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4738562091503268,
            "acc_stderr": 0.028590752958852394,
            "acc_norm": 0.4738562091503268,
            "acc_norm_stderr": 0.028590752958852394
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.55,
            "acc_stderr": 0.05,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6694214876033058,
            "acc_stderr": 0.042943408452120926,
            "acc_norm": 0.6694214876033058,
            "acc_norm_stderr": 0.042943408452120926
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5328947368421053,
            "acc_stderr": 0.040601270352363966,
            "acc_norm": 0.5328947368421053,
            "acc_norm_stderr": 0.040601270352363966
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4199346405228758,
            "acc_stderr": 0.019966811178256483,
            "acc_norm": 0.4199346405228758,
            "acc_norm_stderr": 0.019966811178256483
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3900709219858156,
            "acc_stderr": 0.029097675599463926,
            "acc_norm": 0.3900709219858156,
            "acc_norm_stderr": 0.029097675599463926
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3125,
            "acc_stderr": 0.043994650575715215,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.043994650575715215
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.375,
            "acc_stderr": 0.033016908987210894,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.033016908987210894
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.293854748603352,
            "acc_stderr": 0.015235075776719603,
            "acc_norm": 0.293854748603352,
            "acc_norm_stderr": 0.015235075776719603
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.65,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.65,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3602941176470588,
            "acc_stderr": 0.029163128570670733,
            "acc_norm": 0.3602941176470588,
            "acc_norm_stderr": 0.029163128570670733
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5224489795918368,
            "acc_stderr": 0.03197694118713672,
            "acc_norm": 0.5224489795918368,
            "acc_norm_stderr": 0.03197694118713672
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.7130801687763713,
            "acc_stderr": 0.029443773022594693,
            "acc_norm": 0.7130801687763713,
            "acc_norm_stderr": 0.029443773022594693
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.378748370273794,
            "acc_stderr": 0.01238905210500373,
            "acc_norm": 0.378748370273794,
            "acc_norm_stderr": 0.01238905210500373
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5049019607843137,
            "acc_stderr": 0.03509143375606786,
            "acc_norm": 0.5049019607843137,
            "acc_norm_stderr": 0.03509143375606786
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5333333333333333,
            "acc_stderr": 0.03895658065271846,
            "acc_norm": 0.5333333333333333,
            "acc_norm_stderr": 0.03895658065271846
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3353733170134639,
            "mc1_stderr": 0.016527534039668987,
            "mc2": 0.4883439191552012,
            "mc2_stderr": 0.01576336696184338
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.10153482880755609,
            "acc_stderr": 0.010384198041619998,
            "acc_norm": 0.30814639905548996,
            "acc_norm_stderr": 0.01587451515629839
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "DopeorNope/SOLAR_C-v1-10.7B",
        "model_sha": "9521d07028323f3055664fe03904caeac51b6141",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
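
A minimal sketch (not part of the results payload above) of how one might load this file and macro-average `acc_norm` over the ko_mmlu subtasks; the file name `results.json` is an assumption.

```python
# Minimal sketch: read the results JSON and macro-average acc_norm over
# the ko_mmlu subtasks. The file name "results.json" is an assumption.
import json

with open("results.json", encoding="utf-8") as f:
    data = json.load(f)

# Collect acc_norm for every task whose key marks it as a ko_mmlu subtask.
mmlu_acc_norm = [
    metrics["acc_norm"]
    for task, metrics in data["results"].items()
    if task.startswith("harness|ko_mmlu_")
]

print(f"ko_mmlu subtasks: {len(mmlu_acc_norm)}")
print(f"macro-average acc_norm: {sum(mmlu_acc_norm) / len(mmlu_acc_norm):.4f}")
```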