{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.42662116040955633, "acc_stderr": 0.014453185592920293, "acc_norm": 0.48293515358361777, "acc_norm_stderr": 0.014602878388536597 }, "harness|ko_hellaswag|10": { "acc": 0.41037641904003186, "acc_stderr": 0.004908967278222492, "acc_norm": 0.5455088627763394, "acc_norm_stderr": 0.004969070188763748 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5614035087719298, "acc_stderr": 0.0380579750559046, "acc_norm": 0.5614035087719298, "acc_norm_stderr": 0.0380579750559046 }, "harness|ko_mmlu_management|5": { "acc": 0.6116504854368932, "acc_stderr": 0.04825729337356389, "acc_norm": 0.6116504854368932, "acc_norm_stderr": 0.04825729337356389 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5823754789272031, "acc_stderr": 0.0176356373269515, "acc_norm": 0.5823754789272031, "acc_norm_stderr": 0.0176356373269515 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.45185185185185184, "acc_stderr": 0.04299268905480863, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.04299268905480863 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4297872340425532, "acc_stderr": 0.03236214467715564, "acc_norm": 0.4297872340425532, "acc_norm_stderr": 0.03236214467715564 }, "harness|ko_mmlu_virology|5": { "acc": 0.40963855421686746, "acc_stderr": 0.03828401115079022, "acc_norm": 0.40963855421686746, "acc_norm_stderr": 0.03828401115079022 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5401929260450161, "acc_stderr": 0.028306190403305693, "acc_norm": 0.5401929260450161, "acc_norm_stderr": 0.028306190403305693 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5112107623318386, "acc_stderr": 0.033549366530984746, "acc_norm": 0.5112107623318386, "acc_norm_stderr": 0.033549366530984746 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5038167938931297, "acc_stderr": 0.043851623256015534, "acc_norm": 0.5038167938931297, "acc_norm_stderr": 0.043851623256015534 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.6212121212121212, "acc_stderr": 0.03456088731993747, "acc_norm": 0.6212121212121212, "acc_norm_stderr": 0.03456088731993747 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.47586206896551725, "acc_stderr": 0.041618085035015295, "acc_norm": 0.47586206896551725, "acc_norm_stderr": 0.041618085035015295 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.044405219061793275, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.044405219061793275 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.542016806722689, "acc_stderr": 0.03236361111951941, "acc_norm": 0.542016806722689, "acc_norm_stderr": 0.03236361111951941 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4948717948717949, "acc_stderr": 0.02534967290683867, "acc_norm": 0.4948717948717949, "acc_norm_stderr": 0.02534967290683867 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5833333333333334, "acc_stderr": 0.04766075165356461, "acc_norm": 0.5833333333333334, 
"acc_norm_stderr": 0.04766075165356461 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.42857142857142855, "acc_stderr": 0.034819048444388045, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.034819048444388045 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5935483870967742, "acc_stderr": 0.027941727346256304, "acc_norm": 0.5935483870967742, "acc_norm_stderr": 0.027941727346256304 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7307692307692307, "acc_stderr": 0.029058588303748845, "acc_norm": 0.7307692307692307, "acc_norm_stderr": 0.029058588303748845 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5207547169811321, "acc_stderr": 0.03074634997572347, "acc_norm": 0.5207547169811321, "acc_norm_stderr": 0.03074634997572347 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5818181818181818, "acc_stderr": 0.04724577405731572, "acc_norm": 0.5818181818181818, "acc_norm_stderr": 0.04724577405731572 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.027840811495871923, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.027840811495871923 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.03879687024073327, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.03879687024073327 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6766169154228856, "acc_stderr": 0.03307615947979034, "acc_norm": 0.6766169154228856, "acc_norm_stderr": 0.03307615947979034 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.47398843930635837, "acc_stderr": 0.03807301726504511, "acc_norm": 0.47398843930635837, "acc_norm_stderr": 0.03807301726504511 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.02487081525105709, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.02487081525105709 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.375, "acc_stderr": 0.04048439222695598, "acc_norm": 0.375, "acc_norm_stderr": 0.04048439222695598 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5404624277456648, "acc_stderr": 0.026830805998952233, "acc_norm": 0.5404624277456648, "acc_norm_stderr": 0.026830805998952233 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4723926380368098, "acc_stderr": 0.0392237829061099, "acc_norm": 0.4723926380368098, "acc_norm_stderr": 0.0392237829061099 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.5154320987654321, "acc_stderr": 0.02780749004427619, "acc_norm": 0.5154320987654321, "acc_norm_stderr": 0.02780749004427619 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.6373056994818653, "acc_stderr": 0.034697137917043715, "acc_norm": 0.6373056994818653, "acc_norm_stderr": 0.034697137917043715 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6495412844036698, "acc_stderr": 0.020456077599824454, "acc_norm": 0.6495412844036698, "acc_norm_stderr": 0.020456077599824454 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.545751633986928, "acc_stderr": 0.02850980780262659, "acc_norm": 0.545751633986928, "acc_norm_stderr": 0.02850980780262659 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_international_law|5": { "acc": 0.7107438016528925, "acc_stderr": 0.041391127276354626, "acc_norm": 0.7107438016528925, "acc_norm_stderr": 0.041391127276354626 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.506578947368421, "acc_stderr": 0.04068590050224971, "acc_norm": 0.506578947368421, "acc_norm_stderr": 0.04068590050224971 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.44281045751633985, "acc_stderr": 0.02009508315457734, "acc_norm": 0.44281045751633985, "acc_norm_stderr": 0.02009508315457734 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.35815602836879434, "acc_stderr": 0.02860208586275941, "acc_norm": 0.35815602836879434, "acc_norm_stderr": 0.02860208586275941 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3392857142857143, "acc_stderr": 0.04493949068613539, "acc_norm": 0.3392857142857143, "acc_norm_stderr": 0.04493949068613539 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4027777777777778, "acc_stderr": 0.03344887382997866, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.03344887382997866 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2770949720670391, "acc_stderr": 0.014968772435812145, "acc_norm": 0.2770949720670391, "acc_norm_stderr": 0.014968772435812145 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3786764705882353, "acc_stderr": 0.02946513363977613, "acc_norm": 0.3786764705882353, "acc_norm_stderr": 0.02946513363977613 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.6204081632653061, "acc_stderr": 0.031067211262872464, "acc_norm": 0.6204081632653061, "acc_norm_stderr": 0.031067211262872464 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6497890295358649, "acc_stderr": 0.031052391937584346, "acc_norm": 0.6497890295358649, "acc_norm_stderr": 0.031052391937584346 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.35267275097783574, "acc_stderr": 0.012203286846053886, "acc_norm": 0.35267275097783574, "acc_norm_stderr": 0.012203286846053886 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5637254901960784, "acc_stderr": 0.03480693138457039, "acc_norm": 0.5637254901960784, "acc_norm_stderr": 0.03480693138457039 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.593939393939394, "acc_stderr": 0.03834816355401181, "acc_norm": 0.593939393939394, "acc_norm_stderr": 0.03834816355401181 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2839657282741738, "mc1_stderr": 0.015785370858396704, "mc2": 0.4382557590409575, "mc2_stderr": 0.015307727969976953 }, "harness|ko_commongen_v2|2": { "acc": 0.4332939787485242, "acc_stderr": 0.017036683641893098, "acc_norm": 0.51357733175915, "acc_norm_stderr": 0.01718401506040146 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Changgil/K2S3-Mistral-7b-v1.3", "model_sha": "3825ea65280f33aad5dab2d8b51a0af776f8e4a6", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }