{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.197098976109215,
            "acc_stderr": 0.011625047669880612,
            "acc_norm": 0.26535836177474403,
            "acc_norm_stderr": 0.012902554762313964
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.29904401513642703,
            "acc_stderr": 0.0045690346133326004,
            "acc_norm": 0.36675960963951404,
            "acc_norm_stderr": 0.0048093520750089385
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03218093795602357,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03218093795602357
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.20388349514563106,
            "acc_stderr": 0.0398913985953177,
            "acc_norm": 0.20388349514563106,
            "acc_norm_stderr": 0.0398913985953177
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2796934865900383,
            "acc_stderr": 0.016050792148036536,
            "acc_norm": 0.2796934865900383,
            "acc_norm_stderr": 0.016050792148036536
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.32592592592592595,
            "acc_stderr": 0.040491220417025055,
            "acc_norm": 0.32592592592592595,
            "acc_norm_stderr": 0.040491220417025055
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.23404255319148937,
            "acc_stderr": 0.027678452578212387,
            "acc_norm": 0.23404255319148937,
            "acc_norm_stderr": 0.027678452578212387
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.21084337349397592,
            "acc_stderr": 0.031755547866299194,
            "acc_norm": 0.21084337349397592,
            "acc_norm_stderr": 0.031755547866299194
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2765273311897106,
            "acc_stderr": 0.02540383297817961,
            "acc_norm": 0.2765273311897106,
            "acc_norm_stderr": 0.02540383297817961
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.2825112107623318,
            "acc_stderr": 0.030216831011508762,
            "acc_norm": 0.2825112107623318,
            "acc_norm_stderr": 0.030216831011508762
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.24427480916030533,
            "acc_stderr": 0.03768335959728742,
            "acc_norm": 0.24427480916030533,
            "acc_norm_stderr": 0.03768335959728742
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.2676767676767677,
            "acc_stderr": 0.03154449888270285,
            "acc_norm": 0.2676767676767677,
            "acc_norm_stderr": 0.03154449888270285
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.25517241379310346,
            "acc_stderr": 0.03632984052707842,
            "acc_norm": 0.25517241379310346,
            "acc_norm_stderr": 0.03632984052707842
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.04158307533083286,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.04158307533083286
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.027553614467863786,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.027553614467863786
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2641025641025641,
            "acc_stderr": 0.022352193737453285,
            "acc_norm": 0.2641025641025641,
            "acc_norm_stderr": 0.022352193737453285
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.04284467968052191,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.04284467968052191
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2315270935960591,
            "acc_stderr": 0.029678333141444444,
            "acc_norm": 0.2315270935960591,
            "acc_norm_stderr": 0.029678333141444444
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3096774193548387,
            "acc_stderr": 0.026302774983517414,
            "acc_norm": 0.3096774193548387,
            "acc_norm_stderr": 0.026302774983517414
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.030882736974138663,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.030882736974138663
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2641509433962264,
            "acc_stderr": 0.0271342916287417,
            "acc_norm": 0.2641509433962264,
            "acc_norm_stderr": 0.0271342916287417
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.20909090909090908,
            "acc_stderr": 0.038950910157241364,
            "acc_norm": 0.20909090909090908,
            "acc_norm_stderr": 0.038950910157241364
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24814814814814815,
            "acc_stderr": 0.0263357394040558,
            "acc_norm": 0.24814814814814815,
            "acc_norm_stderr": 0.0263357394040558
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.03734535676787198,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.03734535676787198
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.29850746268656714,
            "acc_stderr": 0.032357437893550424,
            "acc_norm": 0.29850746268656714,
            "acc_norm_stderr": 0.032357437893550424
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.21965317919075145,
            "acc_stderr": 0.031568093627031744,
            "acc_norm": 0.21965317919075145,
            "acc_norm_stderr": 0.031568093627031744
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.24603174603174602,
            "acc_stderr": 0.022182037202948368,
            "acc_norm": 0.24603174603174602,
            "acc_norm_stderr": 0.022182037202948368
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.03476590104304134,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.03476590104304134
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909282,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909282
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.23410404624277456,
            "acc_stderr": 0.022797110278071128,
            "acc_norm": 0.23410404624277456,
            "acc_norm_stderr": 0.022797110278071128
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.37423312883435583,
            "acc_stderr": 0.03802068102899616,
            "acc_norm": 0.37423312883435583,
            "acc_norm_stderr": 0.03802068102899616
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2654320987654321,
            "acc_stderr": 0.024569223600460845,
            "acc_norm": 0.2654320987654321,
            "acc_norm_stderr": 0.024569223600460845
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.23,
            "acc_stderr": 0.042295258468165065,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.042295258468165065
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.2538860103626943,
            "acc_stderr": 0.03141024780565318,
            "acc_norm": 0.2538860103626943,
            "acc_norm_stderr": 0.03141024780565318
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.039994238792813344,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.039994238792813344
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.28807339449541286,
            "acc_stderr": 0.019416445892636015,
            "acc_norm": 0.28807339449541286,
            "acc_norm_stderr": 0.019416445892636015
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.20634920634920634,
            "acc_stderr": 0.0361960452412425,
            "acc_norm": 0.20634920634920634,
            "acc_norm_stderr": 0.0361960452412425
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.025646863097137908,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.025646863097137908
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.38016528925619836,
            "acc_stderr": 0.04431324501968431,
            "acc_norm": 0.38016528925619836,
            "acc_norm_stderr": 0.04431324501968431
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.2236842105263158,
            "acc_stderr": 0.03391160934343602,
            "acc_norm": 0.2236842105263158,
            "acc_norm_stderr": 0.03391160934343602
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.01784808957491323,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.01784808957491323
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.25886524822695034,
            "acc_stderr": 0.026129572527180848,
            "acc_norm": 0.25886524822695034,
            "acc_norm_stderr": 0.026129572527180848
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.30357142857142855,
            "acc_stderr": 0.04364226155841044,
            "acc_norm": 0.30357142857142855,
            "acc_norm_stderr": 0.04364226155841044
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.030225226160012386,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.030225226160012386
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24581005586592178,
            "acc_stderr": 0.014400296429225605,
            "acc_norm": 0.24581005586592178,
            "acc_norm_stderr": 0.014400296429225605
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.18,
            "acc_stderr": 0.038612291966536955,
            "acc_norm": 0.18,
            "acc_norm_stderr": 0.038612291966536955
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.35661764705882354,
            "acc_stderr": 0.029097209568411962,
            "acc_norm": 0.35661764705882354,
            "acc_norm_stderr": 0.029097209568411962
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.24489795918367346,
            "acc_stderr": 0.027529637440174913,
            "acc_norm": 0.24489795918367346,
            "acc_norm_stderr": 0.027529637440174913
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.3291139240506329,
            "acc_stderr": 0.03058732629470236,
            "acc_norm": 0.3291139240506329,
            "acc_norm_stderr": 0.03058732629470236
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2522816166883963,
            "acc_stderr": 0.011092789056875248,
            "acc_norm": 0.2522816166883963,
            "acc_norm_stderr": 0.011092789056875248
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.27450980392156865,
            "acc_stderr": 0.031321798030832904,
            "acc_norm": 0.27450980392156865,
            "acc_norm_stderr": 0.031321798030832904
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4,
            "acc_stderr": 0.03825460278380026,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.03825460278380026
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2692778457772338,
            "mc1_stderr": 0.015528566637087288,
            "mc2": 0.4755864114164748,
            "mc2_stderr": 0.016657423214439165
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.1912632821723731,
            "acc_stderr": 0.013521790445859333,
            "acc_norm": 0.3659976387249115,
            "acc_norm_stderr": 0.016561489664895686
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Jaewoo1/Platypus7B_Follow_FT",
        "model_sha": "ac5c77ab817d2d9b0a4f3fc7c609dce3770428d8",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}